From caf86765f53f854035f55bb8109a6143e9e6d33c Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 23 Jun 2021 01:39:50 +0900
Subject: [PATCH] feat: build for release

---
 build.json | 2 +-
 node_modules/.yarn-integrity | 752 +-
 node_modules/@actions/core/README.md | 17 +-
 node_modules/@actions/core/lib/command.js | 17 +-
 node_modules/@actions/core/lib/command.js.map | 2 +-
 node_modules/@actions/core/lib/core.d.ts | 26 +-
 node_modules/@actions/core/lib/core.js | 71 +-
 node_modules/@actions/core/lib/core.js.map | 2 +-
 .../@actions/core/lib/file-command.js | 17 +-
 .../@actions/core/lib/file-command.js.map | 2 +-
 node_modules/@actions/core/lib/utils.js | 1 +
 node_modules/@actions/core/lib/utils.js.map | 2 +-
 node_modules/@actions/core/package.json | 2 +-
 .../LICENSE => @actions/github/LICENSE.md} | 4 +-
 node_modules/@actions/github/README.md | 6 +-
 node_modules/@actions/github/lib/context.d.ts | 3 +
 node_modules/@actions/github/lib/context.js | 4 +
 .../@actions/github/lib/context.js.map | 2 +-
 node_modules/@actions/github/lib/github.js | 2 +-
 .../@actions/github/lib/internal/utils.js | 2 +-
 node_modules/@actions/github/lib/utils.d.ts | 9 +-
 node_modules/@actions/github/lib/utils.js | 2 +-
 .../plugin-rest-endpoint-methods/README.md | 74 -
 .../dist-node/index.js | 1229 -
 .../dist-node/index.js.map | 1 -
 .../dist-src/endpoints-to-methods.js | 60 -
 .../dist-src/generated/endpoints.js | 1405 -
 .../dist-src/generated/method-types.js | 1 -
 .../parameters-and-response-types.js | 1 -
 .../dist-src/index.js | 11 -
 .../dist-src/types.js | 1 -
 .../dist-src/version.js | 1 -
 .../dist-types/endpoints-to-methods.d.ts | 4 -
 .../dist-types/generated/endpoints.d.ts | 3 -
 .../dist-types/generated/method-types.d.ts | 7826 ----
 .../parameters-and-response-types.d.ts | 2629 --
 .../dist-types/index.d.ts | 7 -
 .../dist-types/types.d.ts | 18 -
 .../dist-types/version.d.ts | 1 -
 .../dist-web/index.js | 1479 -
 .../dist-web/index.js.map | 1 -
 .../plugin-rest-endpoint-methods/package.json | 60 -
 node_modules/@actions/github/package.json | 18 +-
 node_modules/@nodelib/fs.scandir/.eslintcache | 1 -
 .../@nodelib/fs.scandir/out/adapters/fs.d.ts | 33 +-
 .../fs.scandir/out/adapters/fs.d.ts.map | 1 -
 .../@nodelib/fs.scandir/out/adapters/fs.js | 38 +-
 .../fs.scandir/out/adapters/fs.spec.d.ts | 2 -
 .../fs.scandir/out/adapters/fs.spec.d.ts.map | 1 -
 .../fs.scandir/out/adapters/fs.spec.js | 20 -
 .../@nodelib/fs.scandir/out/constants.d.ts | 9 +-
 .../fs.scandir/out/constants.d.ts.map | 1 -
 .../@nodelib/fs.scandir/out/constants.js | 31 +-
 .../@nodelib/fs.scandir/out/index.d.ts | 25 +-
 .../@nodelib/fs.scandir/out/index.d.ts.map | 1 -
 node_modules/@nodelib/fs.scandir/out/index.js | 51 +-
 .../@nodelib/fs.scandir/out/index.spec.d.ts | 2 -
 .../fs.scandir/out/index.spec.d.ts.map | 1 -
 .../@nodelib/fs.scandir/out/index.spec.js | 71 -
 .../fs.scandir/out/providers/async.d.ts | 15 +-
 .../fs.scandir/out/providers/async.d.ts.map | 1 -
 .../fs.scandir/out/providers/async.js | 196 +-
 .../fs.scandir/out/providers/async.spec.d.ts | 2 -
 .../out/providers/async.spec.d.ts.map | 1 -
 .../fs.scandir/out/providers/async.spec.js | 180 -
 .../fs.scandir/out/providers/common.d.ts | 3 +-
 .../fs.scandir/out/providers/common.d.ts.map | 1 -
 .../fs.scandir/out/providers/common.js | 26 +-
 .../fs.scandir/out/providers/common.spec.d.ts | 2 -
 .../out/providers/common.spec.d.ts.map | 1 -
 .../fs.scandir/out/providers/common.spec.js | 22 -
 .../fs.scandir/out/providers/sync.d.ts | 11 +-
.../fs.scandir/out/providers/sync.d.ts.map | 1 - .../@nodelib/fs.scandir/out/providers/sync.js | 108 +- .../fs.scandir/out/providers/sync.spec.d.ts | 2 - .../out/providers/sync.spec.d.ts.map | 1 - .../fs.scandir/out/providers/sync.spec.js | 144 - .../@nodelib/fs.scandir/out/settings.d.ts | 41 +- .../@nodelib/fs.scandir/out/settings.d.ts.map | 1 - .../@nodelib/fs.scandir/out/settings.js | 48 +- .../fs.scandir/out/settings.spec.d.ts | 2 - .../fs.scandir/out/settings.spec.d.ts.map | 1 - .../@nodelib/fs.scandir/out/settings.spec.js | 27 - .../@nodelib/fs.scandir/out/types/index.d.ts | 40 +- .../fs.scandir/out/types/index.d.ts.map | 1 - .../@nodelib/fs.scandir/out/types/index.js | 4 +- .../@nodelib/fs.scandir/out/utils/fs.d.ts | 5 +- .../@nodelib/fs.scandir/out/utils/fs.d.ts.map | 1 - .../@nodelib/fs.scandir/out/utils/fs.js | 38 +- .../fs.scandir/out/utils/fs.spec.d.ts | 2 - .../fs.scandir/out/utils/fs.spec.d.ts.map | 1 - .../@nodelib/fs.scandir/out/utils/fs.spec.js | 20 - .../@nodelib/fs.scandir/out/utils/index.d.ts | 5 +- .../fs.scandir/out/utils/index.d.ts.map | 1 - .../@nodelib/fs.scandir/out/utils/index.js | 10 +- node_modules/@nodelib/fs.scandir/package.json | 15 +- .../fs.scandir/src/adapters/fs.spec.ts | 31 - .../@nodelib/fs.scandir/src/adapters/fs.ts | 30 - .../@nodelib/fs.scandir/src/constants.ts | 15 - .../@nodelib/fs.scandir/src/index.spec.ts | 85 - node_modules/@nodelib/fs.scandir/src/index.ts | 51 - .../fs.scandir/src/providers/async.spec.ts | 236 - .../fs.scandir/src/providers/async.ts | 121 - .../fs.scandir/src/providers/common.spec.ts | 24 - .../fs.scandir/src/providers/common.ts | 10 - .../fs.scandir/src/providers/sync.spec.ts | 186 - .../@nodelib/fs.scandir/src/providers/sync.ts | 62 - .../@nodelib/fs.scandir/src/settings.spec.ts | 32 - .../@nodelib/fs.scandir/src/settings.ts | 33 - .../@nodelib/fs.scandir/src/types/index.ts | 21 - .../@nodelib/fs.scandir/src/utils/fs.spec.ts | 21 - .../@nodelib/fs.scandir/src/utils/fs.ts | 27 - .../@nodelib/fs.scandir/src/utils/index.ts | 5 - .../@nodelib/fs.scandir/tsconfig.json | 15 - .../@nodelib/fs.scandir/tsconfig.tsbuildinfo | 1720 - node_modules/@nodelib/fs.stat/.eslintcache | 1 - .../@nodelib/fs.stat/out/adapters/fs.d.ts | 24 +- .../@nodelib/fs.stat/out/adapters/fs.d.ts.map | 1 - .../@nodelib/fs.stat/out/adapters/fs.js | 34 +- .../fs.stat/out/adapters/fs.spec.d.ts | 2 - .../fs.stat/out/adapters/fs.spec.d.ts.map | 1 - .../@nodelib/fs.stat/out/adapters/fs.spec.js | 20 - node_modules/@nodelib/fs.stat/out/index.d.ts | 25 +- .../@nodelib/fs.stat/out/index.d.ts.map | 1 - node_modules/@nodelib/fs.stat/out/index.js | 51 +- .../@nodelib/fs.stat/out/index.spec.d.ts | 2 - .../@nodelib/fs.stat/out/index.spec.d.ts.map | 1 - .../@nodelib/fs.stat/out/index.spec.js | 56 - .../@nodelib/fs.stat/out/providers/async.d.ts | 9 +- .../fs.stat/out/providers/async.d.ts.map | 1 - .../@nodelib/fs.stat/out/providers/async.js | 68 +- .../fs.stat/out/providers/async.spec.d.ts | 2 - .../fs.stat/out/providers/async.spec.d.ts.map | 1 - .../fs.stat/out/providers/async.spec.js | 83 - .../@nodelib/fs.stat/out/providers/sync.d.ts | 7 +- .../fs.stat/out/providers/sync.d.ts.map | 1 - .../@nodelib/fs.stat/out/providers/sync.js | 46 +- .../fs.stat/out/providers/sync.spec.d.ts | 2 - .../fs.stat/out/providers/sync.spec.d.ts.map | 1 - .../fs.stat/out/providers/sync.spec.js | 66 - .../@nodelib/fs.stat/out/settings.d.ts | 33 +- .../@nodelib/fs.stat/out/settings.d.ts.map | 1 - node_modules/@nodelib/fs.stat/out/settings.js | 32 +- .../@nodelib/fs.stat/out/settings.spec.d.ts | 2 - 
.../fs.stat/out/settings.spec.d.ts.map | 1 - .../@nodelib/fs.stat/out/settings.spec.js | 26 - .../@nodelib/fs.stat/out/types/index.d.ts | 9 +- .../@nodelib/fs.stat/out/types/index.d.ts.map | 1 - .../@nodelib/fs.stat/out/types/index.js | 4 +- node_modules/@nodelib/fs.stat/package.json | 12 +- .../@nodelib/fs.stat/src/adapters/fs.spec.ts | 31 - .../@nodelib/fs.stat/src/adapters/fs.ts | 26 - .../@nodelib/fs.stat/src/index.spec.ts | 70 - node_modules/@nodelib/fs.stat/src/index.ts | 50 - .../fs.stat/src/providers/async.spec.ts | 102 - .../@nodelib/fs.stat/src/providers/async.ts | 43 - .../fs.stat/src/providers/sync.spec.ts | 90 - .../@nodelib/fs.stat/src/providers/sync.ts | 26 - .../@nodelib/fs.stat/src/settings.spec.ts | 31 - node_modules/@nodelib/fs.stat/src/settings.ts | 21 - .../@nodelib/fs.stat/src/types/index.ts | 4 - node_modules/@nodelib/fs.stat/tsconfig.json | 12 - .../@nodelib/fs.stat/tsconfig.tsbuildinfo | 1570 - node_modules/@nodelib/fs.walk/.eslintcache | 1 - node_modules/@nodelib/fs.walk/out/index.d.ts | 29 +- .../@nodelib/fs.walk/out/index.d.ts.map | 1 - node_modules/@nodelib/fs.walk/out/index.js | 67 +- .../@nodelib/fs.walk/out/index.spec.d.ts | 2 - .../@nodelib/fs.walk/out/index.spec.d.ts.map | 1 - .../@nodelib/fs.walk/out/index.spec.js | 99 - .../@nodelib/fs.walk/out/providers/async.d.ts | 25 +- .../fs.walk/out/providers/async.d.ts.map | 1 - .../@nodelib/fs.walk/out/providers/async.js | 60 +- .../fs.walk/out/providers/async.spec.d.ts | 2 - .../fs.walk/out/providers/async.spec.d.ts.map | 1 - .../fs.walk/out/providers/async.spec.js | 42 - .../@nodelib/fs.walk/out/providers/index.d.ts | 9 +- .../fs.walk/out/providers/index.d.ts.map | 1 - .../@nodelib/fs.walk/out/providers/index.js | 18 +- .../fs.walk/out/providers/stream.d.ts | 25 +- .../fs.walk/out/providers/stream.d.ts.map | 1 - .../@nodelib/fs.walk/out/providers/stream.js | 68 +- .../fs.walk/out/providers/stream.spec.d.ts | 2 - .../out/providers/stream.spec.d.ts.map | 1 - .../fs.walk/out/providers/stream.spec.js | 62 - .../@nodelib/fs.walk/out/providers/sync.d.ts | 21 +- .../fs.walk/out/providers/sync.d.ts.map | 1 - .../@nodelib/fs.walk/out/providers/sync.js | 28 +- .../fs.walk/out/providers/sync.spec.d.ts | 2 - .../fs.walk/out/providers/sync.spec.d.ts.map | 1 - .../fs.walk/out/providers/sync.spec.js | 27 - .../@nodelib/fs.walk/out/readers/async.d.ts | 61 +- .../fs.walk/out/readers/async.d.ts.map | 1 - .../@nodelib/fs.walk/out/readers/async.js | 193 +- .../fs.walk/out/readers/async.spec.d.ts | 2 - .../fs.walk/out/readers/async.spec.d.ts.map | 1 - .../fs.walk/out/readers/async.spec.js | 165 - .../@nodelib/fs.walk/out/readers/common.d.ts | 14 +- .../fs.walk/out/readers/common.d.ts.map | 1 - .../@nodelib/fs.walk/out/readers/common.js | 62 +- .../fs.walk/out/readers/common.spec.d.ts | 2 - .../fs.walk/out/readers/common.spec.d.ts.map | 1 - .../fs.walk/out/readers/common.spec.js | 85 - .../@nodelib/fs.walk/out/readers/reader.d.ts | 13 +- .../fs.walk/out/readers/reader.d.ts.map | 1 - .../@nodelib/fs.walk/out/readers/reader.js | 22 +- .../fs.walk/out/readers/reader.spec.d.ts | 2 - .../fs.walk/out/readers/reader.spec.d.ts.map | 1 - .../fs.walk/out/readers/reader.spec.js | 25 - .../@nodelib/fs.walk/out/readers/sync.d.ts | 31 +- .../fs.walk/out/readers/sync.d.ts.map | 1 - .../@nodelib/fs.walk/out/readers/sync.js | 118 +- .../fs.walk/out/readers/sync.spec.d.ts | 2 - .../fs.walk/out/readers/sync.spec.d.ts.map | 1 - .../@nodelib/fs.walk/out/readers/sync.spec.js | 89 - .../@nodelib/fs.walk/out/settings.d.ts | 61 +- 
.../@nodelib/fs.walk/out/settings.d.ts.map | 1 - node_modules/@nodelib/fs.walk/out/settings.js | 52 +- .../@nodelib/fs.walk/out/settings.spec.d.ts | 2 - .../fs.walk/out/settings.spec.d.ts.map | 1 - .../@nodelib/fs.walk/out/settings.spec.js | 28 - .../@nodelib/fs.walk/out/tests/index.d.ts | 16 - .../@nodelib/fs.walk/out/tests/index.d.ts.map | 1 - .../@nodelib/fs.walk/out/tests/index.js | 34 - .../@nodelib/fs.walk/out/types/index.d.ts | 17 +- .../@nodelib/fs.walk/out/types/index.d.ts.map | 1 - .../@nodelib/fs.walk/out/types/index.js | 4 +- node_modules/@nodelib/fs.walk/package.json | 15 +- .../@nodelib/fs.walk/src/index.spec.ts | 129 - node_modules/@nodelib/fs.walk/src/index.ts | 63 - .../fs.walk/src/providers/async.spec.ts | 57 - .../@nodelib/fs.walk/src/providers/async.ts | 40 - .../@nodelib/fs.walk/src/providers/index.ts | 9 - .../fs.walk/src/providers/stream.spec.ts | 90 - .../@nodelib/fs.walk/src/providers/stream.ts | 36 - .../fs.walk/src/providers/sync.spec.ts | 34 - .../@nodelib/fs.walk/src/providers/sync.ts | 13 - .../fs.walk/src/readers/async.spec.ts | 232 - .../@nodelib/fs.walk/src/readers/async.ts | 126 - .../fs.walk/src/readers/common.spec.ts | 114 - .../@nodelib/fs.walk/src/readers/common.ts | 33 - .../fs.walk/src/readers/reader.spec.ts | 30 - .../@nodelib/fs.walk/src/readers/reader.ts | 8 - .../@nodelib/fs.walk/src/readers/sync.spec.ts | 126 - .../@nodelib/fs.walk/src/readers/sync.ts | 69 - .../@nodelib/fs.walk/src/settings.spec.ts | 33 - node_modules/@nodelib/fs.walk/src/settings.ts | 46 - .../@nodelib/fs.walk/src/tests/index.ts | 40 - .../@nodelib/fs.walk/src/types/index.ts | 9 - node_modules/@nodelib/fs.walk/tsconfig.json | 15 - .../@nodelib/fs.walk/tsconfig.tsbuildinfo | 1894 - node_modules/@octokit/core/README.md | 2 +- node_modules/@octokit/core/dist-node/index.js | 5 +- .../@octokit/core/dist-node/index.js.map | 2 +- .../@octokit/core/dist-src/version.js | 2 +- .../@octokit/core/dist-types/types.d.ts | 2 +- .../@octokit/core/dist-types/version.d.ts | 2 +- node_modules/@octokit/core/dist-web/index.js | 2 +- .../@octokit/core/dist-web/index.js.map | 2 +- node_modules/@octokit/core/package.json | 10 +- .../@octokit/endpoint/dist-node/index.js | 2 +- .../@octokit/endpoint/dist-node/index.js.map | 2 +- .../@octokit/endpoint/dist-src/version.js | 2 +- .../@octokit/endpoint/dist-types/version.d.ts | 2 +- .../@octokit/endpoint/dist-web/index.js | 2 +- .../@octokit/endpoint/dist-web/index.js.map | 2 +- node_modules/@octokit/endpoint/package.json | 17 +- node_modules/@octokit/graphql/README.md | 2 +- .../@octokit/graphql/dist-node/index.js | 2 +- .../@octokit/graphql/dist-node/index.js.map | 2 +- .../@octokit/graphql/dist-src/version.js | 2 +- .../@octokit/graphql/dist-types/version.d.ts | 2 +- .../@octokit/graphql/dist-web/index.js | 2 +- .../@octokit/graphql/dist-web/index.js.map | 2 +- node_modules/@octokit/graphql/package.json | 10 +- .../@octokit/openapi-types/dist-node/index.js | 2 +- .../openapi-types/dist-node/index.js.map | 2 +- .../openapi-types/dist-src/version.js | 2 +- .../dist-types/generated/types.d.ts | 486 +- .../openapi-types/dist-types/version.d.ts | 2 +- .../@octokit/openapi-types/dist-web/index.js | 2 +- .../openapi-types/dist-web/index.js.map | 2 +- .../@octokit/openapi-types/package.json | 6 +- .../@octokit/plugin-paginate-rest/README.md | 5 +- .../plugin-paginate-rest/dist-node/index.js | 101 +- .../dist-node/index.js.map | 2 +- .../plugin-paginate-rest/dist-src/iterator.js | 28 +- .../normalize-paginated-list-response.js | 7 + 
.../plugin-paginate-rest/dist-src/version.js | 2 +- .../generated/paginating-endpoints.d.ts | 98 +- .../dist-types/iterator.d.ts | 7 + .../dist-types/types.d.ts | 24 +- .../dist-types/version.d.ts | 2 +- .../plugin-paginate-rest/dist-web/index.js | 37 +- .../dist-web/index.js.map | 2 +- .../plugin-paginate-rest/package.json | 12 +- .../plugin-rest-endpoint-methods/README.md | 6 +- .../dist-node/index.js | 59 +- .../dist-node/index.js.map | 2 +- .../dist-src/generated/endpoints.js | 19 + .../dist-src/version.js | 2 +- .../dist-types/generated/method-types.d.ts | 127 +- .../parameters-and-response-types.d.ts | 20 + .../dist-types/version.d.ts | 2 +- .../dist-web/index.js | 21 +- .../dist-web/index.js.map | 2 +- .../plugin-rest-endpoint-methods/package.json | 10 +- node_modules/@octokit/request-error/README.md | 2 +- .../@octokit/request-error/dist-node/index.js | 37 +- .../request-error/dist-node/index.js.map | 2 +- .../@octokit/request-error/dist-src/index.js | 31 +- .../request-error/dist-types/index.d.ts | 12 +- .../request-error/dist-types/types.d.ts | 6 +- .../@octokit/request-error/dist-web/index.js | 31 +- .../request-error/dist-web/index.js.map | 2 +- .../@octokit/request-error/package.json | 17 +- node_modules/@octokit/request/README.md | 12 +- .../@octokit/request/dist-node/index.js | 97 +- .../@octokit/request/dist-node/index.js.map | 2 +- .../request/dist-src/fetch-wrapper.js | 87 +- .../@octokit/request/dist-src/version.js | 2 +- .../@octokit/request/dist-types/version.d.ts | 2 +- .../@octokit/request/dist-web/index.js | 89 +- .../@octokit/request/dist-web/index.js.map | 2 +- .../node_modules/is-plain-object/LICENSE | 21 - .../node_modules/is-plain-object/README.md | 125 - .../is-plain-object/dist/is-plain-object.js | 38 - .../is-plain-object/dist/is-plain-object.mjs | 34 - .../is-plain-object/is-plain-object.d.ts | 1 - .../node_modules/is-plain-object/package.json | 85 - node_modules/@octokit/request/package.json | 12 +- node_modules/@octokit/types/README.md | 3 +- .../@octokit/types/dist-node/index.js | 2 +- .../@octokit/types/dist-src/VERSION.js | 2 +- .../@octokit/types/dist-types/VERSION.d.ts | 2 +- .../types/dist-types/generated/Endpoints.d.ts | 16 + node_modules/@octokit/types/dist-web/index.js | 2 +- node_modules/@octokit/types/package.json | 6 +- .../anchor-markdown-header/package.json | 24 +- .../@technote-space/doctoc/package.json | 30 +- .../filter-github-action/package.json | 24 +- .../github-action-helper/dist/api-helper.js | 34 +- .../github-action-helper/dist/types.d.ts | 4 +- .../github-action-helper/package.json | 24 +- .../github-action-log-helper/package.json | 28 +- .../dist/utils/command.js | 8 +- .../dist/utils/misc.js | 6 +- .../dist/utils/process.js | 2 +- .../@octokit/openapi-types/README.md | 9 - .../@octokit/openapi-types/dist-node/index.js | 8 - .../openapi-types/dist-node/index.js.map | 1 - .../openapi-types/dist-src/generated/types.js | 5 - .../@octokit/openapi-types/dist-src/index.js | 2 - .../openapi-types/dist-src/version.js | 1 - .../dist-types/generated/types.d.ts | 31583 ---------------- .../openapi-types/dist-types/index.d.ts | 2 - .../openapi-types/dist-types/version.d.ts | 1 - .../@octokit/openapi-types/dist-web/index.js | 4 - .../openapi-types/dist-web/index.js.map | 1 - .../@octokit/openapi-types/package.json | 36 - .../github-action-pr-helper/package.json | 28 +- .../@textlint/ast-node-types/CHANGELOG.md | 378 - .../@textlint/ast-node-types/lib/index.js.map | 1 - .../lib/{ => src}/TypeofTxtNode.d.ts | 0 .../lib/{ => 
src}/TypeofTxtNode.js | 0 .../lib/{ => src}/TypeofTxtNode.js.map | 2 +- .../ast-node-types/lib/{ => src}/index.d.ts | 0 .../ast-node-types/lib/{ => src}/index.js | 0 .../ast-node-types/lib/src/index.js.map | 1 + .../ast-node-types/module/index.js.map | 1 - .../module/{ => src}/TypeofTxtNode.d.ts | 0 .../module/{ => src}/TypeofTxtNode.js | 0 .../module/{ => src}/TypeofTxtNode.js.map | 2 +- .../module/{ => src}/index.d.ts | 0 .../ast-node-types/module/{ => src}/index.js | 0 .../ast-node-types/module/src/index.js.map | 1 + .../@textlint/ast-node-types/package.json | 19 +- .../@textlint/markdown-to-ast/CHANGELOG.md | 561 - .../markdown-to-ast/lib/index.js.map | 1 - .../lib/mapping/markdown-syntax-map.js.map | 1 - .../markdown-to-ast/lib/{ => src}/index.d.ts | 0 .../markdown-to-ast/lib/{ => src}/index.js | 45 +- .../markdown-to-ast/lib/src/index.js.map | 1 + .../mapping/markdown-syntax-map.d.ts | 1 + .../{ => src}/mapping/markdown-syntax-map.js | 3 +- .../src/mapping/markdown-syntax-map.js.map | 1 + .../lib/src/parse-markdown.d.ts | 1 + .../markdown-to-ast/lib/src/parse-markdown.js | 26 + .../lib/src/parse-markdown.js.map | 1 + .../markdown-to-ast/module/index.js.map | 1 - .../module/mapping/markdown-syntax-map.js.map | 1 - .../module/{ => src}/index.d.ts | 0 .../markdown-to-ast/module/{ => src}/index.js | 38 +- .../markdown-to-ast/module/src/index.js.map | 1 + .../mapping/markdown-syntax-map.d.ts | 1 + .../{ => src}/mapping/markdown-syntax-map.js | 3 +- .../src/mapping/markdown-syntax-map.js.map | 1 + .../module/src/parse-markdown.d.ts | 1 + .../module/src/parse-markdown.js | 19 + .../module/src/parse-markdown.js.map | 1 + .../@textlint/markdown-to-ast/package.json | 37 +- .../@textlint/markdown-to-ast/src/index.ts | 31 +- .../src/mapping/markdown-syntax-map.ts | 1 + .../markdown-to-ast/src/parse-markdown.ts | 19 + node_modules/@types/mdast/LICENSE | 21 + node_modules/@types/mdast/README.md | 16 + node_modules/@types/mdast/index.d.ts | 217 + node_modules/@types/mdast/package.json | 26 + node_modules/@types/unist/LICENSE | 21 + node_modules/@types/unist/README.md | 16 + node_modules/@types/unist/index.d.ts | 98 + node_modules/@types/unist/package.json | 43 + node_modules/before-after-hook/README.md | 6 +- node_modules/before-after-hook/package.json | 2 +- node_modules/boundary/README.md | 74 - node_modules/boundary/lib/index.js | 83 - node_modules/boundary/package.json | 50 - node_modules/ccount/index.js | 22 + .../{collapse-white-space => ccount}/license | 0 .../package.json | 47 +- node_modules/ccount/readme.md | 68 + node_modules/collapse-white-space/index.js | 8 - node_modules/collapse-white-space/readme.md | 58 - node_modules/dom-serializer/README.md | 19 +- node_modules/dom-serializer/lib/index.d.ts | 6 +- .../dom-serializer/lib/index.d.ts.map | 2 +- node_modules/dom-serializer/lib/index.js | 3 +- node_modules/dom-serializer/package.json | 26 +- node_modules/domutils/lib/stringify.d.ts | 19 +- node_modules/domutils/lib/stringify.d.ts.map | 2 +- node_modules/domutils/lib/stringify.js | 45 +- node_modules/domutils/package.json | 10 +- node_modules/escape-string-regexp/index.d.ts | 18 + node_modules/escape-string-regexp/index.js | 13 + .../LICENSE => escape-string-regexp/license} | 6 +- .../escape-string-regexp/package.json | 38 + node_modules/escape-string-regexp/readme.md | 34 + node_modules/inherits/LICENSE | 16 - node_modules/inherits/README.md | 42 - node_modules/inherits/inherits.js | 9 - node_modules/inherits/inherits_browser.js | 27 - node_modules/inherits/package.json | 29 - 
node_modules/is-buffer/README.md | 1 + node_modules/is-buffer/index.d.ts | 2 + node_modules/is-buffer/index.js | 16 +- node_modules/is-buffer/package.json | 52 +- node_modules/is-buffer/test/basic.js | 24 - .../node_modules => }/is-number/LICENSE | 0 .../node_modules => }/is-number/README.md | 0 .../node_modules => }/is-number/index.js | 0 .../node_modules => }/is-number/package.json | 0 node_modules/is-plain-obj/index.d.ts | 29 + node_modules/is-plain-obj/index.js | 11 +- node_modules/is-plain-obj/license | 20 +- node_modules/is-plain-obj/package.json | 70 +- node_modules/is-plain-obj/readme.md | 33 +- .../node_modules => }/is-plain-object/LICENSE | 0 .../is-plain-object/README.md | 0 .../is-plain-object/dist/is-plain-object.js | 0 .../is-plain-object/dist/is-plain-object.mjs | 0 .../is-plain-object/is-plain-object.d.ts | 0 .../is-plain-object/package.json | 0 node_modules/is-whitespace-character/index.js | 14 - .../is-whitespace-character/package.json | 74 - .../is-whitespace-character/readme.md | 74 - node_modules/is-word-character/index.js | 14 - node_modules/is-word-character/readme.md | 72 - node_modules/longest-streak/index.js | 36 + .../license | 0 .../package.json | 39 +- node_modules/longest-streak/readme.md | 72 + node_modules/markdown-escapes/index.js | 57 - node_modules/markdown-escapes/package.json | 72 - node_modules/markdown-escapes/readme.md | 80 - node_modules/markdown-table/index.js | 249 + .../{state-toggle => markdown-table}/license | 2 +- .../package.json | 50 +- node_modules/markdown-table/readme.md | 259 + .../mdast-util-find-and-replace/index.js | 180 + .../license | 2 +- .../mdast-util-find-and-replace/package.json | 76 + .../mdast-util-find-and-replace/readme.md | 187 + .../mdast-util-footnote/from-markdown.js | 69 + node_modules/mdast-util-footnote/index.js | 2 + .../license | 2 +- node_modules/mdast-util-footnote/package.json | 76 + node_modules/mdast-util-footnote/readme.md | 270 + .../mdast-util-footnote/to-markdown.js | 66 + .../mdast-util-from-markdown/dist/index.js | 823 + .../mdast-util-from-markdown/index.js | 3 + .../mdast-util-from-markdown/lib/index.js | 819 + .../license | 2 +- .../mdast-util-from-markdown/package.json | 109 + .../mdast-util-from-markdown/readme.md | 206 + .../mdast-util-from-markdown/types/index.d.ts | 34 + .../mdast-util-frontmatter/from-markdown.js | 40 + node_modules/mdast-util-frontmatter/index.js | 2 + node_modules/mdast-util-frontmatter/license | 22 + .../mdast-util-frontmatter/package.json | 76 + node_modules/mdast-util-frontmatter/readme.md | 186 + .../mdast-util-frontmatter/to-markdown.js | 46 + .../from-markdown.js | 157 + .../mdast-util-gfm-autolink-literal/index.js | 2 + .../mdast-util-gfm-autolink-literal/license | 22 + .../package.json | 90 + .../mdast-util-gfm-autolink-literal/readme.md | 188 + .../to-markdown.js | 26 + .../from-markdown.js | 11 + .../mdast-util-gfm-strikethrough/index.js | 2 + .../mdast-util-gfm-strikethrough/license | 22 + .../mdast-util-gfm-strikethrough/package.json | 80 + .../mdast-util-gfm-strikethrough/readme.md | 173 + .../to-markdown.js | 17 + .../mdast-util-gfm-table/from-markdown.js | 53 + node_modules/mdast-util-gfm-table/index.js | 2 + node_modules/mdast-util-gfm-table/license | 22 + .../mdast-util-gfm-table/package.json | 84 + node_modules/mdast-util-gfm-table/readme.md | 231 + .../mdast-util-gfm-table/to-markdown.js | 112 + .../from-markdown.js | 50 + .../mdast-util-gfm-task-list-item/index.js | 2 + .../mdast-util-gfm-task-list-item/license | 22 + .../package.json | 83 + 
.../mdast-util-gfm-task-list-item/readme.md | 226 + .../to-markdown.js | 22 + node_modules/mdast-util-gfm/from-markdown.js | 42 + node_modules/mdast-util-gfm/index.js | 2 + node_modules/mdast-util-gfm/license | 22 + node_modules/mdast-util-gfm/package.json | 93 + node_modules/mdast-util-gfm/readme.md | 320 + node_modules/mdast-util-gfm/to-markdown.js | 22 + node_modules/mdast-util-to-markdown/index.js | 1 + .../mdast-util-to-markdown/lib/configure.js | 27 + .../lib/handle/blockquote.js | 15 + .../lib/handle/break.js | 20 + .../mdast-util-to-markdown/lib/handle/code.js | 64 + .../lib/handle/definition.js | 46 + .../lib/handle/emphasis.js | 21 + .../lib/handle/heading.js | 48 + .../mdast-util-to-markdown/lib/handle/html.js | 10 + .../lib/handle/image-reference.js | 37 + .../lib/handle/image.js | 53 + .../lib/handle/index.js | 20 + .../lib/handle/inline-code.js | 69 + .../lib/handle/link-reference.js | 38 + .../mdast-util-to-markdown/lib/handle/link.js | 70 + .../lib/handle/list-item.js | 47 + .../mdast-util-to-markdown/lib/handle/list.js | 10 + .../lib/handle/paragraph.js | 12 + .../mdast-util-to-markdown/lib/handle/root.js | 7 + .../lib/handle/strong.js | 21 + .../mdast-util-to-markdown/lib/handle/text.js | 7 + .../lib/handle/thematic-break.js | 14 + .../mdast-util-to-markdown/lib/index.js | 73 + .../mdast-util-to-markdown/lib/join.js | 37 + .../mdast-util-to-markdown/lib/unsafe.js | 110 + .../lib/util/association.js | 30 + .../lib/util/check-bullet.js | 15 + .../lib/util/check-emphasis.js | 15 + .../lib/util/check-fence.js | 15 + .../lib/util/check-list-item-indent.js | 19 + .../lib/util/check-quote.js | 15 + .../lib/util/check-rule-repeat.js | 15 + .../lib/util/check-rule.js | 15 + .../lib/util/check-strong.js | 15 + .../lib/util/container-flow.js | 47 + .../lib/util/container-phrasing.js | 57 + .../lib/util/format-code-as-indented.js | 14 + .../lib/util/format-heading-as-setext.js | 9 + .../lib/util/format-link-as-autolink.js | 26 + .../lib/util/indent-lines.js | 25 + .../lib/util/pattern-compile.js | 25 + .../lib/util/pattern-in-scope.js | 30 + .../mdast-util-to-markdown/lib/util/safe.js | 139 + node_modules/mdast-util-to-markdown/license | 22 + .../mdast-util-to-markdown/package.json | 96 + node_modules/mdast-util-to-markdown/readme.md | 312 + .../mdast-util-to-markdown/types/index.d.ts | 82 + node_modules/mdast-util-to-string/index.js | 29 + .../license | 0 .../mdast-util-to-string/package.json | 80 + node_modules/mdast-util-to-string/readme.md | 127 + .../mdast-util-to-string/types/index.d.ts | 8 + .../micromark-extension-footnote/html.js | 168 + .../micromark-extension-footnote/index.js | 442 + .../micromark-extension-footnote/license | 22 + .../micromark-extension-footnote/package.json | 73 + .../micromark-extension-footnote/readme.md | 211 + .../micromark-extension-frontmatter/html.js | 1 + .../micromark-extension-frontmatter/index.js | 1 + .../lib/html.js | 29 + .../lib/matters.js | 18 +- .../lib/syntax.js | 167 + .../micromark-extension-frontmatter/license | 22 + .../package.json | 72 + .../micromark-extension-frontmatter/readme.md | 203 + .../html.js | 28 + .../index.js | 1 + .../license | 22 + .../package.json | 76 + .../readme.md | 133 + .../syntax.js | 581 + .../html.js | 10 + .../index.js | 160 + .../license | 22 + .../package.json | 81 + .../readme.md | 135 + .../types/html.d.ts | 8 + .../types/index.d.ts | 26 + .../micromark-extension-gfm-table/html.js | 138 + .../micromark-extension-gfm-table/index.js | 1 + .../micromark-extension-gfm-table/license | 22 + 
.../package.json | 76 + .../micromark-extension-gfm-table/readme.md | 119 + .../micromark-extension-gfm-table/syntax.js | 576 + .../micromark-extension-gfm-tagfilter/html.js | 29 + .../index.js | 1 + .../micromark-extension-gfm-tagfilter/license | 22 + .../package.json | 71 + .../readme.md | 110 + .../html.js | 14 + .../index.js | 1 + .../license | 22 + .../package.json | 75 + .../readme.md | 124 + .../syntax.js | 80 + node_modules/micromark-extension-gfm/html.js | 8 + node_modules/micromark-extension-gfm/index.js | 1 + node_modules/micromark-extension-gfm/license | 22 + .../micromark-extension-gfm/package.json | 95 + .../micromark-extension-gfm/readme.md | 246 + .../micromark-extension-gfm/syntax.js | 11 + .../micromark-extension-gfm/types/html.d.ts | 8 + .../micromark-extension-gfm/types/index.d.ts | 19 + node_modules/micromark/buffer.d.ts | 5 + node_modules/micromark/buffer.js | 3 + node_modules/micromark/buffer.mjs | 1 + .../micromark/dist/character/ascii-alpha.js | 7 + .../dist/character/ascii-alphanumeric.js | 7 + .../micromark/dist/character/ascii-atext.js | 7 + .../micromark/dist/character/ascii-control.js | 12 + .../micromark/dist/character/ascii-digit.js | 7 + .../dist/character/ascii-hex-digit.js | 7 + .../dist/character/ascii-punctuation.js | 7 + .../micromark/dist/character/codes.d.ts | 3 + .../micromark/dist/character/codes.js | 257 + .../markdown-line-ending-or-space.js | 7 + .../dist/character/markdown-line-ending.js | 7 + .../dist/character/markdown-space.js | 7 + .../dist/character/unicode-punctuation.js | 10 + .../dist/character/unicode-whitespace.js | 7 + .../micromark/dist/character/values.d.ts | 102 + .../micromark/dist/character/values.js | 111 + node_modules/micromark/dist/compile/html.js | 787 + .../micromark/dist/constant/assign.js | 5 + .../micromark/dist/constant/constants.d.ts | 23 + .../micromark/dist/constant/constants.js | 71 + .../micromark/dist/constant/from-char-code.js | 5 + .../dist/constant/has-own-property.js | 5 + .../dist/constant/html-block-names.js | 69 + .../micromark/dist/constant/html-raw-names.js | 6 + .../micromark/dist/constant/splice.js | 5 + .../micromark/dist/constant/types.d.ts | 3 + node_modules/micromark/dist/constant/types.js | 357 + .../constant/unicode-punctuation-regex.js | 11 + node_modules/micromark/dist/constructs.js | 127 + node_modules/micromark/dist/index.d.ts | 11 + node_modules/micromark/dist/index.js | 21 + .../micromark/dist/initialize/content.js | 69 + .../micromark/dist/initialize/document.js | 237 + .../micromark/dist/initialize/flow.js | 60 + .../micromark/dist/initialize/text.js | 201 + node_modules/micromark/dist/parse.d.ts | 5 + node_modules/micromark/dist/parse.js | 36 + node_modules/micromark/dist/postprocess.d.ts | 5 + node_modules/micromark/dist/postprocess.js | 13 + node_modules/micromark/dist/preprocess.d.ts | 11 + node_modules/micromark/dist/preprocess.js | 87 + node_modules/micromark/dist/shared-types.d.ts | 291 + node_modules/micromark/dist/stream.d.ts | 6 + node_modules/micromark/dist/stream.js | 103 + .../micromark/dist/tokenize/attention.js | 186 + .../micromark/dist/tokenize/autolink.js | 125 + .../micromark/dist/tokenize/block-quote.js | 67 + .../dist/tokenize/character-escape.js | 34 + .../dist/tokenize/character-reference.js | 94 + .../micromark/dist/tokenize/code-fenced.js | 176 + .../micromark/dist/tokenize/code-indented.js | 72 + .../micromark/dist/tokenize/code-text.js | 162 + .../micromark/dist/tokenize/content.js | 99 + .../micromark/dist/tokenize/definition.js | 115 + 
.../dist/tokenize/factory-destination.js | 131 + .../micromark/dist/tokenize/factory-label.js | 88 + .../micromark/dist/tokenize/factory-space.js | 30 + .../micromark/dist/tokenize/factory-title.js | 75 + .../dist/tokenize/factory-whitespace.js | 32 + .../dist/tokenize/hard-break-escape.js | 31 + .../micromark/dist/tokenize/heading-atx.js | 129 + .../micromark/dist/tokenize/html-flow.js | 486 + .../micromark/dist/tokenize/html-text.js | 435 + .../micromark/dist/tokenize/label-end.js | 330 + .../dist/tokenize/label-start-image.js | 46 + .../dist/tokenize/label-start-link.js | 35 + .../micromark/dist/tokenize/line-ending.js | 21 + node_modules/micromark/dist/tokenize/list.js | 214 + .../dist/tokenize/partial-blank-line.js | 19 + .../dist/tokenize/setext-underline.js | 117 + .../micromark/dist/tokenize/thematic-break.js | 53 + .../micromark/dist/util/chunked-push.js | 14 + .../micromark/dist/util/chunked-splice.js | 38 + .../micromark/dist/util/classify-character.js | 25 + .../micromark/dist/util/combine-extensions.js | 49 + .../dist/util/combine-html-extensions.js | 34 + .../micromark/dist/util/create-tokenizer.js | 316 + node_modules/micromark/dist/util/miniflat.js | 11 + .../micromark/dist/util/move-point.js | 12 + .../dist/util/normalize-identifier.js | 18 + .../micromark/dist/util/normalize-uri.js | 62 + .../micromark/dist/util/prefix-size.js | 11 + .../micromark/dist/util/regex-check.js | 13 + .../micromark/dist/util/resolve-all.js | 20 + .../micromark/dist/util/safe-from-int.js | 26 + .../micromark/dist/util/serialize-chunks.js | 40 + node_modules/micromark/dist/util/shallow.js | 9 + .../micromark/dist/util/size-chunks.js | 16 + .../micromark/dist/util/slice-chunks.js | 27 + .../micromark/dist/util/subtokenize.js | 199 + node_modules/micromark/index.d.ts | 5 + node_modules/micromark/index.js | 3 + node_modules/micromark/index.mjs | 1 + .../micromark/lib/character/ascii-alpha.js | 7 + .../micromark/lib/character/ascii-alpha.mjs | 3 + .../lib/character/ascii-alphanumeric.js | 7 + .../lib/character/ascii-alphanumeric.mjs | 3 + .../micromark/lib/character/ascii-atext.js | 7 + .../micromark/lib/character/ascii-atext.mjs | 3 + .../micromark/lib/character/ascii-control.js | 14 + .../micromark/lib/character/ascii-control.mjs | 12 + .../micromark/lib/character/ascii-digit.js | 7 + .../micromark/lib/character/ascii-digit.mjs | 3 + .../lib/character/ascii-hex-digit.js | 7 + .../lib/character/ascii-hex-digit.mjs | 3 + .../lib/character/ascii-punctuation.js | 7 + .../lib/character/ascii-punctuation.mjs | 3 + .../micromark/lib/character/codes.d.ts | 148 + node_modules/micromark/lib/character/codes.js | 158 + .../micromark/lib/character/codes.mjs | 154 + .../markdown-line-ending-or-space.js | 9 + .../markdown-line-ending-or-space.mjs | 7 + .../lib/character/markdown-line-ending.js | 9 + .../lib/character/markdown-line-ending.mjs | 7 + .../micromark/lib/character/markdown-space.js | 13 + .../lib/character/markdown-space.mjs | 11 + .../lib/character/unicode-punctuation.js | 10 + .../lib/character/unicode-punctuation.mjs | 6 + .../lib/character/unicode-whitespace.js | 7 + .../lib/character/unicode-whitespace.mjs | 3 + .../micromark/lib/character/values.d.ts | 210 + .../micromark/lib/character/values.js | 111 + .../micromark/lib/character/values.mjs | 107 + node_modules/micromark/lib/compile/html.js | 810 + node_modules/micromark/lib/compile/html.mjs | 813 + node_modules/micromark/lib/constant/assign.js | 5 + .../micromark/lib/constant/assign.mjs | 1 + .../micromark/lib/constant/constants.d.ts | 65 + 
.../micromark/lib/constant/constants.js | 45 + .../micromark/lib/constant/constants.mjs | 41 + .../micromark/lib/constant/from-char-code.js | 5 + .../micromark/lib/constant/from-char-code.mjs | 1 + .../lib/constant/has-own-property.js | 5 + .../lib/constant/has-own-property.mjs | 1 + .../lib/constant/html-block-names.js | 69 + .../lib/constant/html-block-names.mjs | 65 + .../micromark/lib/constant/html-raw-names.js | 6 + .../micromark/lib/constant/html-raw-names.mjs | 2 + node_modules/micromark/lib/constant/splice.js | 5 + .../micromark/lib/constant/splice.mjs | 1 + .../micromark/lib/constant/types.d.ts | 114 + node_modules/micromark/lib/constant/types.js | 452 + node_modules/micromark/lib/constant/types.mjs | 448 + .../lib/constant/unicode-punctuation-regex.js | 11 + .../constant/unicode-punctuation-regex.mjs | 7 + node_modules/micromark/lib/constructs.js | 98 + node_modules/micromark/lib/constructs.mjs | 85 + node_modules/micromark/lib/index.d.ts | 11 + node_modules/micromark/lib/index.js | 21 + node_modules/micromark/lib/index.mjs | 19 + .../micromark/lib/initialize/content.js | 91 + .../micromark/lib/initialize/content.mjs | 79 + .../micromark/lib/initialize/document.js | 245 + .../micromark/lib/initialize/document.mjs | 239 + node_modules/micromark/lib/initialize/flow.js | 82 + .../micromark/lib/initialize/flow.mjs | 70 + node_modules/micromark/lib/initialize/text.js | 210 + .../micromark/lib/initialize/text.mjs | 203 + node_modules/micromark/lib/parse.d.ts | 5 + node_modules/micromark/lib/parse.js | 36 + node_modules/micromark/lib/parse.mjs | 34 + node_modules/micromark/lib/postprocess.d.ts | 5 + node_modules/micromark/lib/postprocess.js | 13 + node_modules/micromark/lib/postprocess.mjs | 11 + node_modules/micromark/lib/preprocess.d.ts | 11 + node_modules/micromark/lib/preprocess.js | 96 + node_modules/micromark/lib/preprocess.mjs | 94 + node_modules/micromark/lib/shared-types.d.ts | 291 + node_modules/micromark/lib/stream.d.ts | 6 + node_modules/micromark/lib/stream.js | 119 + node_modules/micromark/lib/stream.mjs | 117 + .../micromark/lib/tokenize/attention.js | 216 + .../micromark/lib/tokenize/attention.mjs | 207 + .../micromark/lib/tokenize/autolink.js | 147 + .../micromark/lib/tokenize/autolink.mjs | 138 + .../micromark/lib/tokenize/block-quote.js | 67 + .../micromark/lib/tokenize/block-quote.mjs | 64 + .../lib/tokenize/character-escape.js | 44 + .../lib/tokenize/character-escape.mjs | 35 + .../lib/tokenize/character-reference.js | 101 + .../lib/tokenize/character-reference.mjs | 88 + .../micromark/lib/tokenize/code-fenced.js | 185 + .../micromark/lib/tokenize/code-fenced.mjs | 176 + .../micromark/lib/tokenize/code-indented.js | 91 + .../micromark/lib/tokenize/code-indented.mjs | 88 + .../micromark/lib/tokenize/code-text.js | 191 + .../micromark/lib/tokenize/code-text.mjs | 179 + .../micromark/lib/tokenize/content.js | 121 + .../micromark/lib/tokenize/content.mjs | 109 + .../micromark/lib/tokenize/definition.js | 129 + .../micromark/lib/tokenize/definition.mjs | 120 + .../lib/tokenize/factory-destination.js | 145 + .../lib/tokenize/factory-destination.mjs | 143 + .../micromark/lib/tokenize/factory-label.js | 102 + .../micromark/lib/tokenize/factory-label.mjs | 94 + .../micromark/lib/tokenize/factory-space.js | 31 + .../micromark/lib/tokenize/factory-space.mjs | 29 + .../micromark/lib/tokenize/factory-title.js | 92 + .../micromark/lib/tokenize/factory-title.mjs | 84 + .../lib/tokenize/factory-whitespace.js | 34 + .../lib/tokenize/factory-whitespace.mjs | 32 + 
.../lib/tokenize/hard-break-escape.js | 41 + .../lib/tokenize/hard-break-escape.mjs | 32 + .../micromark/lib/tokenize/heading-atx.js | 151 + .../micromark/lib/tokenize/heading-atx.mjs | 142 + .../micromark/lib/tokenize/html-flow.js | 513 + .../micromark/lib/tokenize/html-flow.mjs | 498 + .../micromark/lib/tokenize/html-text.js | 458 + .../micromark/lib/tokenize/html-text.mjs | 449 + .../micromark/lib/tokenize/label-end.js | 374 + .../micromark/lib/tokenize/label-end.mjs | 350 + .../lib/tokenize/label-start-image.js | 56 + .../lib/tokenize/label-start-image.mjs | 48 + .../lib/tokenize/label-start-link.js | 46 + .../lib/tokenize/label-start-link.mjs | 38 + .../micromark/lib/tokenize/line-ending.js | 31 + .../micromark/lib/tokenize/line-ending.mjs | 22 + node_modules/micromark/lib/tokenize/list.js | 219 + node_modules/micromark/lib/tokenize/list.mjs | 216 + .../lib/tokenize/partial-blank-line.js | 21 + .../lib/tokenize/partial-blank-line.mjs | 18 + .../lib/tokenize/setext-underline.js | 138 + .../lib/tokenize/setext-underline.mjs | 129 + .../micromark/lib/tokenize/thematic-break.js | 74 + .../micromark/lib/tokenize/thematic-break.mjs | 65 + .../micromark/lib/util/chunked-push.js | 14 + .../micromark/lib/util/chunked-push.mjs | 12 + .../micromark/lib/util/chunked-splice.js | 46 + .../micromark/lib/util/chunked-splice.mjs | 44 + .../micromark/lib/util/classify-character.js | 27 + .../micromark/lib/util/classify-character.mjs | 25 + .../micromark/lib/util/combine-extensions.js | 50 + .../micromark/lib/util/combine-extensions.mjs | 48 + .../lib/util/combine-html-extensions.js | 35 + .../lib/util/combine-html-extensions.mjs | 31 + .../micromark/lib/util/create-tokenizer.js | 440 + .../micromark/lib/util/create-tokenizer.mjs | 399 + node_modules/micromark/lib/util/miniflat.js | 11 + node_modules/micromark/lib/util/miniflat.mjs | 9 + node_modules/micromark/lib/util/move-point.js | 12 + .../micromark/lib/util/move-point.mjs | 10 + .../lib/util/normalize-identifier.js | 23 + .../lib/util/normalize-identifier.mjs | 21 + .../micromark/lib/util/normalize-uri.js | 70 + .../micromark/lib/util/normalize-uri.mjs | 68 + .../micromark/lib/util/prefix-size.js | 11 + .../micromark/lib/util/prefix-size.mjs | 9 + .../micromark/lib/util/regex-check.js | 12 + .../micromark/lib/util/regex-check.mjs | 10 + .../micromark/lib/util/resolve-all.js | 20 + .../micromark/lib/util/resolve-all.mjs | 18 + .../micromark/lib/util/safe-from-int.js | 32 + .../micromark/lib/util/safe-from-int.mjs | 30 + .../micromark/lib/util/serialize-chunks.js | 54 + .../micromark/lib/util/serialize-chunks.mjs | 42 + node_modules/micromark/lib/util/shallow.js | 9 + node_modules/micromark/lib/util/shallow.mjs | 7 + .../micromark/lib/util/size-chunks.js | 16 + .../micromark/lib/util/size-chunks.mjs | 14 + .../micromark/lib/util/slice-chunks.js | 43 + .../micromark/lib/util/slice-chunks.mjs | 29 + .../micromark/lib/util/subtokenize.js | 219 + .../micromark/lib/util/subtokenize.mjs | 211 + node_modules/micromark/license | 22 + node_modules/micromark/package.json | 208 + node_modules/micromark/readme.md | 737 + node_modules/micromark/stream.d.ts | 5 + node_modules/micromark/stream.js | 3 + node_modules/micromark/stream.mjs | 1 + node_modules/parse-entities/index.js | 21 +- node_modules/parse-entities/package.json | 30 +- node_modules/parse-entities/readme.md | 6 +- node_modules/parse-entities/types/index.d.ts | 157 + node_modules/picomatch/CHANGELOG.md | 6 + node_modules/picomatch/README.md | 48 +- node_modules/picomatch/lib/parse.js | 18 +- 
node_modules/picomatch/lib/picomatch.js | 65 +- node_modules/picomatch/lib/scan.js | 10 +- node_modules/picomatch/package.json | 2 +- node_modules/remark-footnotes/index.js | 38 + node_modules/remark-footnotes/license | 22 + node_modules/remark-footnotes/package.json | 91 + node_modules/remark-footnotes/readme.md | 223 + .../remark-footnotes/types/index.d.ts | 20 + node_modules/remark-frontmatter/index.js | 66 +- .../remark-frontmatter/lib/compile.js | 19 - node_modules/remark-frontmatter/lib/fence.js | 18 - node_modules/remark-frontmatter/lib/parse.js | 46 - node_modules/remark-frontmatter/package.json | 32 +- node_modules/remark-frontmatter/readme.md | 184 +- .../remark-frontmatter/types/index.d.ts | 63 + node_modules/remark-gfm/index.js | 39 + node_modules/remark-gfm/license | 22 + node_modules/remark-gfm/package.json | 88 + node_modules/remark-gfm/readme.md | 230 + node_modules/remark-gfm/types/index.d.ts | 39 + node_modules/remark-parse/index.js | 28 +- .../remark-parse/lib/block-elements.json | 68 - node_modules/remark-parse/lib/decode.js | 64 - node_modules/remark-parse/lib/defaults.js | 10 - node_modules/remark-parse/lib/locate/break.js | 17 - .../remark-parse/lib/locate/code-inline.js | 7 - .../remark-parse/lib/locate/delete.js | 7 - .../remark-parse/lib/locate/emphasis.js | 18 - .../remark-parse/lib/locate/escape.js | 7 - node_modules/remark-parse/lib/locate/link.js | 16 - .../remark-parse/lib/locate/strong.js | 18 - node_modules/remark-parse/lib/locate/tag.js | 7 - node_modules/remark-parse/lib/locate/url.js | 26 - node_modules/remark-parse/lib/parse.js | 45 - node_modules/remark-parse/lib/parser.js | 152 - node_modules/remark-parse/lib/set-options.js | 47 - .../remark-parse/lib/tokenize/auto-link.js | 145 - .../remark-parse/lib/tokenize/blockquote.js | 129 - .../remark-parse/lib/tokenize/break.js | 40 - .../remark-parse/lib/tokenize/code-fenced.js | 236 - .../lib/tokenize/code-indented.js | 98 - .../remark-parse/lib/tokenize/code-inline.js | 112 - .../remark-parse/lib/tokenize/definition.js | 278 - .../remark-parse/lib/tokenize/delete.js | 60 - .../remark-parse/lib/tokenize/emphasis.js | 85 - .../remark-parse/lib/tokenize/escape.js | 34 - .../lib/tokenize/footnote-definition.js | 185 - .../remark-parse/lib/tokenize/heading-atx.js | 141 - .../lib/tokenize/heading-setext.js | 107 - .../remark-parse/lib/tokenize/html-block.js | 94 - .../remark-parse/lib/tokenize/html-inline.js | 54 - .../remark-parse/lib/tokenize/link.js | 392 - .../remark-parse/lib/tokenize/list.js | 474 - .../remark-parse/lib/tokenize/newline.js | 47 - .../remark-parse/lib/tokenize/paragraph.js | 122 - .../remark-parse/lib/tokenize/reference.js | 206 - .../remark-parse/lib/tokenize/strong.js | 84 - .../remark-parse/lib/tokenize/table.js | 266 - .../remark-parse/lib/tokenize/text.js | 58 - .../lib/tokenize/thematic-break.js | 70 - node_modules/remark-parse/lib/tokenize/url.js | 144 - node_modules/remark-parse/lib/tokenizer.js | 331 - node_modules/remark-parse/lib/unescape.js | 37 - .../remark-parse/lib/util/get-indentation.js | 32 - node_modules/remark-parse/lib/util/html.js | 25 - .../remark-parse/lib/util/interrupt.js | 43 - .../remark-parse/lib/util/normalize.js | 11 - .../lib/util/remove-indentation.js | 78 - node_modules/remark-parse/package.json | 48 +- node_modules/remark-parse/readme.md | 536 +- node_modules/remark-parse/types/index.d.ts | 14 + node_modules/replace-ext/LICENSE | 21 - node_modules/replace-ext/README.md | 50 - node_modules/replace-ext/index.js | 18 - node_modules/replace-ext/package.json | 
44 - node_modules/state-toggle/index.js | 23 - node_modules/state-toggle/readme.md | 95 - node_modules/structured-source/README.md | 66 - node_modules/structured-source/lib/index.js | 9 - .../lib/structured-source.js | 84 - node_modules/structured-source/package.json | 56 - node_modules/trim-trailing-lines/index.js | 8 - node_modules/trim-trailing-lines/readme.md | 68 - node_modules/trim/.npmignore | 4 - node_modules/trim/History.md | 5 - node_modules/trim/Makefile | 7 - node_modules/trim/Readme.md | 69 - node_modules/trim/component.json | 7 - node_modules/trim/index.js | 14 - node_modules/trim/package.json | 18 - node_modules/unherit/index.js | 45 - node_modules/unherit/license | 21 - node_modules/unherit/package.json | 72 - node_modules/unherit/readme.md | 79 - node_modules/unified/changelog.md | 5 + node_modules/unified/index.js | 224 +- node_modules/unified/package.json | 85 +- node_modules/unified/readme.md | 1043 +- node_modules/unified/types/ts3.4/index.d.ts | 407 + node_modules/unified/types/ts4.0/index.d.ts | 402 + node_modules/unist-util-is/convert.d.ts | 6 + node_modules/unist-util-is/convert.js | 46 +- node_modules/unist-util-is/index.d.ts | 71 + node_modules/unist-util-is/index.js | 19 +- node_modules/unist-util-is/package.json | 72 +- node_modules/unist-util-is/readme.md | 33 +- .../unist-util-remove-position/index.js | 18 - .../unist-util-remove-position/package.json | 76 - .../unist-util-remove-position/readme.md | 131 - .../unist-util-stringify-position/index.js | 14 +- .../package.json | 55 +- .../unist-util-stringify-position/readme.md | 92 +- .../types/index.d.ts | 9 + .../unist-util-visit-parents/color.browser.js | 4 + .../unist-util-visit-parents/color.js | 4 + .../unist-util-visit-parents/index.js | 69 +- .../unist-util-visit-parents/package.json | 68 +- .../unist-util-visit-parents/readme.md | 57 +- .../unist-util-visit-parents/types/index.d.ts | 111 + node_modules/unist-util-visit/index.js | 29 - node_modules/unist-util-visit/package.json | 79 - node_modules/unist-util-visit/readme.md | 121 - node_modules/vfile-location/index.js | 74 - node_modules/vfile-location/license | 22 - node_modules/vfile-location/package.json | 73 - node_modules/vfile-location/readme.md | 115 - node_modules/vfile-message/package.json | 40 +- node_modules/vfile-message/readme.md | 60 +- node_modules/vfile-message/types/index.d.ts | 98 + node_modules/vfile/changelog.md | 5 + node_modules/vfile/core.js | 170 +- node_modules/vfile/index.js | 54 +- node_modules/vfile/lib/core.js | 173 + node_modules/vfile/lib/index.js | 46 + node_modules/vfile/lib/minpath.browser.js | 374 + node_modules/vfile/lib/minpath.js | 3 + node_modules/vfile/lib/minproc.browser.js | 10 + node_modules/vfile/lib/minproc.js | 3 + node_modules/vfile/package.json | 95 +- node_modules/vfile/readme.md | 427 +- node_modules/vfile/types/index.d.ts | 161 + node_modules/x-is-string/.npmignore | 16 - node_modules/x-is-string/.travis.yml | 8 - node_modules/x-is-string/LICENCE | 19 - node_modules/x-is-string/README.md | 46 - node_modules/x-is-string/index.js | 7 - node_modules/x-is-string/package.json | 55 - node_modules/x-is-string/test/index.js | 51 - node_modules/xtend/.jshintrc | 30 - node_modules/xtend/LICENSE | 20 - node_modules/xtend/README.md | 32 - node_modules/xtend/immutable.js | 19 - node_modules/xtend/mutable.js | 17 - node_modules/xtend/package.json | 55 - node_modules/xtend/test.js | 103 - node_modules/zwitch/index.js | 28 + .../license | 0 .../{state-toggle => zwitch}/package.json | 35 +- node_modules/zwitch/readme.md 
| 143 + 1072 files changed, 46395 insertions(+), 69321 deletions(-) rename node_modules/{@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/LICENSE => @actions/github/LICENSE.md} (95%) delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/README.md delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map delete mode 100644 node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/package.json delete mode 100644 node_modules/@nodelib/fs.scandir/.eslintcache delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/constants.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.spec.js delete mode 100644 
node_modules/@nodelib/fs.scandir/out/providers/async.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.spec.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.spec.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.spec.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.spec.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/types/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.spec.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.scandir/src/adapters/fs.spec.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/adapters/fs.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/constants.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/index.spec.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/index.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/providers/async.spec.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/providers/async.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/providers/common.spec.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/providers/common.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/providers/sync.spec.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/providers/sync.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/settings.spec.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/settings.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/types/index.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/utils/fs.spec.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/utils/fs.ts delete mode 100644 node_modules/@nodelib/fs.scandir/src/utils/index.ts delete mode 100644 node_modules/@nodelib/fs.scandir/tsconfig.json delete mode 100644 node_modules/@nodelib/fs.scandir/tsconfig.tsbuildinfo delete mode 100644 node_modules/@nodelib/fs.stat/.eslintcache delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.spec.js delete mode 100644 node_modules/@nodelib/fs.stat/out/index.d.ts.map delete mode 100644 
node_modules/@nodelib/fs.stat/out/index.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/index.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/index.spec.js delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.spec.js delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.spec.js delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.spec.js delete mode 100644 node_modules/@nodelib/fs.stat/out/types/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.stat/src/adapters/fs.spec.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/adapters/fs.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/index.spec.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/index.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/providers/async.spec.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/providers/async.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/providers/sync.spec.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/providers/sync.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/settings.spec.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/settings.ts delete mode 100644 node_modules/@nodelib/fs.stat/src/types/index.ts delete mode 100644 node_modules/@nodelib/fs.stat/tsconfig.json delete mode 100644 node_modules/@nodelib/fs.stat/tsconfig.tsbuildinfo delete mode 100644 node_modules/@nodelib/fs.walk/.eslintcache delete mode 100644 node_modules/@nodelib/fs.walk/out/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/index.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/index.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/index.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.d.ts.map delete mode 100644 
node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.spec.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.spec.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.spec.js delete mode 100644 node_modules/@nodelib/fs.walk/out/tests/index.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/tests/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/out/tests/index.js delete mode 100644 node_modules/@nodelib/fs.walk/out/types/index.d.ts.map delete mode 100644 node_modules/@nodelib/fs.walk/src/index.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/index.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/providers/async.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/providers/async.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/providers/index.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/providers/stream.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/providers/stream.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/providers/sync.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/providers/sync.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/async.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/async.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/common.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/common.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/reader.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/reader.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/sync.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/readers/sync.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/settings.spec.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/settings.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/tests/index.ts delete mode 100644 node_modules/@nodelib/fs.walk/src/types/index.ts delete mode 100644 node_modules/@nodelib/fs.walk/tsconfig.json delete mode 100644 node_modules/@nodelib/fs.walk/tsconfig.tsbuildinfo delete mode 100644 node_modules/@octokit/request/node_modules/is-plain-object/LICENSE delete mode 100644 node_modules/@octokit/request/node_modules/is-plain-object/README.md delete mode 100644 
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js delete mode 100644 node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs delete mode 100644 node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts delete mode 100644 node_modules/@octokit/request/node_modules/is-plain-object/package.json delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/README.md delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js.map delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/generated/types.js delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/index.js delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/version.js delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/index.d.ts delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/version.d.ts delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js.map delete mode 100644 node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/package.json delete mode 100644 node_modules/@textlint/ast-node-types/CHANGELOG.md delete mode 100644 node_modules/@textlint/ast-node-types/lib/index.js.map rename node_modules/@textlint/ast-node-types/lib/{ => src}/TypeofTxtNode.d.ts (100%) rename node_modules/@textlint/ast-node-types/lib/{ => src}/TypeofTxtNode.js (100%) rename node_modules/@textlint/ast-node-types/lib/{ => src}/TypeofTxtNode.js.map (52%) rename node_modules/@textlint/ast-node-types/lib/{ => src}/index.d.ts (100%) rename node_modules/@textlint/ast-node-types/lib/{ => src}/index.js (100%) create mode 100644 node_modules/@textlint/ast-node-types/lib/src/index.js.map delete mode 100644 node_modules/@textlint/ast-node-types/module/index.js.map rename node_modules/@textlint/ast-node-types/module/{ => src}/TypeofTxtNode.d.ts (100%) rename node_modules/@textlint/ast-node-types/module/{ => src}/TypeofTxtNode.js (100%) rename node_modules/@textlint/ast-node-types/module/{ => src}/TypeofTxtNode.js.map (52%) rename node_modules/@textlint/ast-node-types/module/{ => src}/index.d.ts (100%) rename node_modules/@textlint/ast-node-types/module/{ => src}/index.js (100%) create mode 100644 node_modules/@textlint/ast-node-types/module/src/index.js.map delete mode 100644 node_modules/@textlint/markdown-to-ast/CHANGELOG.md delete mode 100644 node_modules/@textlint/markdown-to-ast/lib/index.js.map delete mode 100644 node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.js.map rename node_modules/@textlint/markdown-to-ast/lib/{ => src}/index.d.ts (100%) rename node_modules/@textlint/markdown-to-ast/lib/{ => src}/index.js (51%) create mode 100644 
node_modules/@textlint/markdown-to-ast/lib/src/index.js.map rename node_modules/@textlint/markdown-to-ast/lib/{ => src}/mapping/markdown-syntax-map.d.ts (96%) rename node_modules/@textlint/markdown-to-ast/lib/{ => src}/mapping/markdown-syntax-map.js (93%) create mode 100644 node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.js.map create mode 100644 node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.d.ts create mode 100644 node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js create mode 100644 node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js.map delete mode 100644 node_modules/@textlint/markdown-to-ast/module/index.js.map delete mode 100644 node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.js.map rename node_modules/@textlint/markdown-to-ast/module/{ => src}/index.d.ts (100%) rename node_modules/@textlint/markdown-to-ast/module/{ => src}/index.js (53%) create mode 100644 node_modules/@textlint/markdown-to-ast/module/src/index.js.map rename node_modules/@textlint/markdown-to-ast/module/{ => src}/mapping/markdown-syntax-map.d.ts (96%) rename node_modules/@textlint/markdown-to-ast/module/{ => src}/mapping/markdown-syntax-map.js (94%) create mode 100644 node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.js.map create mode 100644 node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.d.ts create mode 100644 node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js create mode 100644 node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js.map create mode 100644 node_modules/@textlint/markdown-to-ast/src/parse-markdown.ts create mode 100644 node_modules/@types/mdast/LICENSE create mode 100644 node_modules/@types/mdast/README.md create mode 100644 node_modules/@types/mdast/index.d.ts create mode 100644 node_modules/@types/mdast/package.json create mode 100644 node_modules/@types/unist/LICENSE create mode 100644 node_modules/@types/unist/README.md create mode 100644 node_modules/@types/unist/index.d.ts create mode 100644 node_modules/@types/unist/package.json delete mode 100644 node_modules/boundary/README.md delete mode 100644 node_modules/boundary/lib/index.js delete mode 100644 node_modules/boundary/package.json create mode 100644 node_modules/ccount/index.js rename node_modules/{collapse-white-space => ccount}/license (100%) rename node_modules/{trim-trailing-lines => ccount}/package.json (71%) create mode 100644 node_modules/ccount/readme.md delete mode 100644 node_modules/collapse-white-space/index.js delete mode 100644 node_modules/collapse-white-space/readme.md create mode 100644 node_modules/escape-string-regexp/index.d.ts create mode 100644 node_modules/escape-string-regexp/index.js rename node_modules/{@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE => escape-string-regexp/license} (80%) create mode 100644 node_modules/escape-string-regexp/package.json create mode 100644 node_modules/escape-string-regexp/readme.md delete mode 100644 node_modules/inherits/LICENSE delete mode 100644 node_modules/inherits/README.md delete mode 100644 node_modules/inherits/inherits.js delete mode 100644 node_modules/inherits/inherits_browser.js delete mode 100644 node_modules/inherits/package.json create mode 100644 node_modules/is-buffer/index.d.ts delete mode 100644 node_modules/is-buffer/test/basic.js rename node_modules/{to-regex-range/node_modules => }/is-number/LICENSE (100%) rename node_modules/{to-regex-range/node_modules => 
}/is-number/README.md (100%) rename node_modules/{to-regex-range/node_modules => }/is-number/index.js (100%) rename node_modules/{to-regex-range/node_modules => }/is-number/package.json (100%) create mode 100644 node_modules/is-plain-obj/index.d.ts rename node_modules/{@octokit/endpoint/node_modules => }/is-plain-object/LICENSE (100%) rename node_modules/{@octokit/endpoint/node_modules => }/is-plain-object/README.md (100%) rename node_modules/{@octokit/endpoint/node_modules => }/is-plain-object/dist/is-plain-object.js (100%) rename node_modules/{@octokit/endpoint/node_modules => }/is-plain-object/dist/is-plain-object.mjs (100%) rename node_modules/{@octokit/endpoint/node_modules => }/is-plain-object/is-plain-object.d.ts (100%) rename node_modules/{@octokit/endpoint/node_modules => }/is-plain-object/package.json (100%) delete mode 100644 node_modules/is-whitespace-character/index.js delete mode 100644 node_modules/is-whitespace-character/package.json delete mode 100644 node_modules/is-whitespace-character/readme.md delete mode 100644 node_modules/is-word-character/index.js delete mode 100644 node_modules/is-word-character/readme.md create mode 100644 node_modules/longest-streak/index.js rename node_modules/{trim-trailing-lines => longest-streak}/license (100%) rename node_modules/{is-word-character => longest-streak}/package.json (72%) create mode 100644 node_modules/longest-streak/readme.md delete mode 100644 node_modules/markdown-escapes/index.js delete mode 100644 node_modules/markdown-escapes/package.json delete mode 100644 node_modules/markdown-escapes/readme.md create mode 100644 node_modules/markdown-table/index.js rename node_modules/{state-toggle => markdown-table}/license (94%) rename node_modules/{collapse-white-space => markdown-table}/package.json (64%) create mode 100644 node_modules/markdown-table/readme.md create mode 100644 node_modules/mdast-util-find-and-replace/index.js rename node_modules/{markdown-escapes => mdast-util-find-and-replace}/license (94%) create mode 100644 node_modules/mdast-util-find-and-replace/package.json create mode 100644 node_modules/mdast-util-find-and-replace/readme.md create mode 100644 node_modules/mdast-util-footnote/from-markdown.js create mode 100644 node_modules/mdast-util-footnote/index.js rename node_modules/{unist-util-remove-position => mdast-util-footnote}/license (94%) create mode 100644 node_modules/mdast-util-footnote/package.json create mode 100644 node_modules/mdast-util-footnote/readme.md create mode 100644 node_modules/mdast-util-footnote/to-markdown.js create mode 100644 node_modules/mdast-util-from-markdown/dist/index.js create mode 100644 node_modules/mdast-util-from-markdown/index.js create mode 100644 node_modules/mdast-util-from-markdown/lib/index.js rename node_modules/{is-word-character => mdast-util-from-markdown}/license (94%) create mode 100644 node_modules/mdast-util-from-markdown/package.json create mode 100644 node_modules/mdast-util-from-markdown/readme.md create mode 100644 node_modules/mdast-util-from-markdown/types/index.d.ts create mode 100644 node_modules/mdast-util-frontmatter/from-markdown.js create mode 100644 node_modules/mdast-util-frontmatter/index.js create mode 100644 node_modules/mdast-util-frontmatter/license create mode 100644 node_modules/mdast-util-frontmatter/package.json create mode 100644 node_modules/mdast-util-frontmatter/readme.md create mode 100644 node_modules/mdast-util-frontmatter/to-markdown.js create mode 100644 node_modules/mdast-util-gfm-autolink-literal/from-markdown.js create mode 
100644 node_modules/mdast-util-gfm-autolink-literal/index.js create mode 100644 node_modules/mdast-util-gfm-autolink-literal/license create mode 100644 node_modules/mdast-util-gfm-autolink-literal/package.json create mode 100644 node_modules/mdast-util-gfm-autolink-literal/readme.md create mode 100644 node_modules/mdast-util-gfm-autolink-literal/to-markdown.js create mode 100644 node_modules/mdast-util-gfm-strikethrough/from-markdown.js create mode 100644 node_modules/mdast-util-gfm-strikethrough/index.js create mode 100644 node_modules/mdast-util-gfm-strikethrough/license create mode 100644 node_modules/mdast-util-gfm-strikethrough/package.json create mode 100644 node_modules/mdast-util-gfm-strikethrough/readme.md create mode 100644 node_modules/mdast-util-gfm-strikethrough/to-markdown.js create mode 100644 node_modules/mdast-util-gfm-table/from-markdown.js create mode 100644 node_modules/mdast-util-gfm-table/index.js create mode 100644 node_modules/mdast-util-gfm-table/license create mode 100644 node_modules/mdast-util-gfm-table/package.json create mode 100644 node_modules/mdast-util-gfm-table/readme.md create mode 100644 node_modules/mdast-util-gfm-table/to-markdown.js create mode 100644 node_modules/mdast-util-gfm-task-list-item/from-markdown.js create mode 100644 node_modules/mdast-util-gfm-task-list-item/index.js create mode 100644 node_modules/mdast-util-gfm-task-list-item/license create mode 100644 node_modules/mdast-util-gfm-task-list-item/package.json create mode 100644 node_modules/mdast-util-gfm-task-list-item/readme.md create mode 100644 node_modules/mdast-util-gfm-task-list-item/to-markdown.js create mode 100644 node_modules/mdast-util-gfm/from-markdown.js create mode 100644 node_modules/mdast-util-gfm/index.js create mode 100644 node_modules/mdast-util-gfm/license create mode 100644 node_modules/mdast-util-gfm/package.json create mode 100644 node_modules/mdast-util-gfm/readme.md create mode 100644 node_modules/mdast-util-gfm/to-markdown.js create mode 100644 node_modules/mdast-util-to-markdown/index.js create mode 100644 node_modules/mdast-util-to-markdown/lib/configure.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/blockquote.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/break.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/code.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/definition.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/emphasis.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/heading.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/html.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/image-reference.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/image.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/index.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/inline-code.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/link-reference.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/link.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/list-item.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/list.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/paragraph.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/root.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/strong.js create mode 100644 
node_modules/mdast-util-to-markdown/lib/handle/text.js create mode 100644 node_modules/mdast-util-to-markdown/lib/handle/thematic-break.js create mode 100644 node_modules/mdast-util-to-markdown/lib/index.js create mode 100644 node_modules/mdast-util-to-markdown/lib/join.js create mode 100644 node_modules/mdast-util-to-markdown/lib/unsafe.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/association.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-bullet.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-emphasis.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-fence.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-quote.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-rule-repeat.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-rule.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/check-strong.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/container-flow.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/format-code-as-indented.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/format-heading-as-setext.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/format-link-as-autolink.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/indent-lines.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js create mode 100644 node_modules/mdast-util-to-markdown/lib/util/safe.js create mode 100644 node_modules/mdast-util-to-markdown/license create mode 100644 node_modules/mdast-util-to-markdown/package.json create mode 100644 node_modules/mdast-util-to-markdown/readme.md create mode 100644 node_modules/mdast-util-to-markdown/types/index.d.ts create mode 100644 node_modules/mdast-util-to-string/index.js rename node_modules/{unist-util-visit => mdast-util-to-string}/license (100%) create mode 100644 node_modules/mdast-util-to-string/package.json create mode 100644 node_modules/mdast-util-to-string/readme.md create mode 100644 node_modules/mdast-util-to-string/types/index.d.ts create mode 100644 node_modules/micromark-extension-footnote/html.js create mode 100644 node_modules/micromark-extension-footnote/index.js create mode 100644 node_modules/micromark-extension-footnote/license create mode 100644 node_modules/micromark-extension-footnote/package.json create mode 100644 node_modules/micromark-extension-footnote/readme.md create mode 100644 node_modules/micromark-extension-frontmatter/html.js create mode 100644 node_modules/micromark-extension-frontmatter/index.js create mode 100644 node_modules/micromark-extension-frontmatter/lib/html.js rename node_modules/{remark-frontmatter => micromark-extension-frontmatter}/lib/matters.js (78%) create mode 100644 node_modules/micromark-extension-frontmatter/lib/syntax.js create mode 100644 node_modules/micromark-extension-frontmatter/license create mode 100644 node_modules/micromark-extension-frontmatter/package.json create mode 100644 node_modules/micromark-extension-frontmatter/readme.md create mode 100644 node_modules/micromark-extension-gfm-autolink-literal/html.js create mode 100644 
node_modules/micromark-extension-gfm-autolink-literal/index.js create mode 100644 node_modules/micromark-extension-gfm-autolink-literal/license create mode 100644 node_modules/micromark-extension-gfm-autolink-literal/package.json create mode 100644 node_modules/micromark-extension-gfm-autolink-literal/readme.md create mode 100644 node_modules/micromark-extension-gfm-autolink-literal/syntax.js create mode 100644 node_modules/micromark-extension-gfm-strikethrough/html.js create mode 100644 node_modules/micromark-extension-gfm-strikethrough/index.js create mode 100644 node_modules/micromark-extension-gfm-strikethrough/license create mode 100644 node_modules/micromark-extension-gfm-strikethrough/package.json create mode 100644 node_modules/micromark-extension-gfm-strikethrough/readme.md create mode 100644 node_modules/micromark-extension-gfm-strikethrough/types/html.d.ts create mode 100644 node_modules/micromark-extension-gfm-strikethrough/types/index.d.ts create mode 100644 node_modules/micromark-extension-gfm-table/html.js create mode 100644 node_modules/micromark-extension-gfm-table/index.js create mode 100644 node_modules/micromark-extension-gfm-table/license create mode 100644 node_modules/micromark-extension-gfm-table/package.json create mode 100644 node_modules/micromark-extension-gfm-table/readme.md create mode 100644 node_modules/micromark-extension-gfm-table/syntax.js create mode 100644 node_modules/micromark-extension-gfm-tagfilter/html.js create mode 100644 node_modules/micromark-extension-gfm-tagfilter/index.js create mode 100644 node_modules/micromark-extension-gfm-tagfilter/license create mode 100644 node_modules/micromark-extension-gfm-tagfilter/package.json create mode 100644 node_modules/micromark-extension-gfm-tagfilter/readme.md create mode 100644 node_modules/micromark-extension-gfm-task-list-item/html.js create mode 100644 node_modules/micromark-extension-gfm-task-list-item/index.js create mode 100644 node_modules/micromark-extension-gfm-task-list-item/license create mode 100644 node_modules/micromark-extension-gfm-task-list-item/package.json create mode 100644 node_modules/micromark-extension-gfm-task-list-item/readme.md create mode 100644 node_modules/micromark-extension-gfm-task-list-item/syntax.js create mode 100644 node_modules/micromark-extension-gfm/html.js create mode 100644 node_modules/micromark-extension-gfm/index.js create mode 100644 node_modules/micromark-extension-gfm/license create mode 100644 node_modules/micromark-extension-gfm/package.json create mode 100644 node_modules/micromark-extension-gfm/readme.md create mode 100644 node_modules/micromark-extension-gfm/syntax.js create mode 100644 node_modules/micromark-extension-gfm/types/html.d.ts create mode 100644 node_modules/micromark-extension-gfm/types/index.d.ts create mode 100644 node_modules/micromark/buffer.d.ts create mode 100644 node_modules/micromark/buffer.js create mode 100644 node_modules/micromark/buffer.mjs create mode 100644 node_modules/micromark/dist/character/ascii-alpha.js create mode 100644 node_modules/micromark/dist/character/ascii-alphanumeric.js create mode 100644 node_modules/micromark/dist/character/ascii-atext.js create mode 100644 node_modules/micromark/dist/character/ascii-control.js create mode 100644 node_modules/micromark/dist/character/ascii-digit.js create mode 100644 node_modules/micromark/dist/character/ascii-hex-digit.js create mode 100644 node_modules/micromark/dist/character/ascii-punctuation.js create mode 100644 node_modules/micromark/dist/character/codes.d.ts create 
mode 100644 node_modules/micromark/dist/character/codes.js create mode 100644 node_modules/micromark/dist/character/markdown-line-ending-or-space.js create mode 100644 node_modules/micromark/dist/character/markdown-line-ending.js create mode 100644 node_modules/micromark/dist/character/markdown-space.js create mode 100644 node_modules/micromark/dist/character/unicode-punctuation.js create mode 100644 node_modules/micromark/dist/character/unicode-whitespace.js create mode 100644 node_modules/micromark/dist/character/values.d.ts create mode 100644 node_modules/micromark/dist/character/values.js create mode 100644 node_modules/micromark/dist/compile/html.js create mode 100644 node_modules/micromark/dist/constant/assign.js create mode 100644 node_modules/micromark/dist/constant/constants.d.ts create mode 100644 node_modules/micromark/dist/constant/constants.js create mode 100644 node_modules/micromark/dist/constant/from-char-code.js create mode 100644 node_modules/micromark/dist/constant/has-own-property.js create mode 100644 node_modules/micromark/dist/constant/html-block-names.js create mode 100644 node_modules/micromark/dist/constant/html-raw-names.js create mode 100644 node_modules/micromark/dist/constant/splice.js create mode 100644 node_modules/micromark/dist/constant/types.d.ts create mode 100644 node_modules/micromark/dist/constant/types.js create mode 100644 node_modules/micromark/dist/constant/unicode-punctuation-regex.js create mode 100644 node_modules/micromark/dist/constructs.js create mode 100644 node_modules/micromark/dist/index.d.ts create mode 100644 node_modules/micromark/dist/index.js create mode 100644 node_modules/micromark/dist/initialize/content.js create mode 100644 node_modules/micromark/dist/initialize/document.js create mode 100644 node_modules/micromark/dist/initialize/flow.js create mode 100644 node_modules/micromark/dist/initialize/text.js create mode 100644 node_modules/micromark/dist/parse.d.ts create mode 100644 node_modules/micromark/dist/parse.js create mode 100644 node_modules/micromark/dist/postprocess.d.ts create mode 100644 node_modules/micromark/dist/postprocess.js create mode 100644 node_modules/micromark/dist/preprocess.d.ts create mode 100644 node_modules/micromark/dist/preprocess.js create mode 100644 node_modules/micromark/dist/shared-types.d.ts create mode 100644 node_modules/micromark/dist/stream.d.ts create mode 100644 node_modules/micromark/dist/stream.js create mode 100644 node_modules/micromark/dist/tokenize/attention.js create mode 100644 node_modules/micromark/dist/tokenize/autolink.js create mode 100644 node_modules/micromark/dist/tokenize/block-quote.js create mode 100644 node_modules/micromark/dist/tokenize/character-escape.js create mode 100644 node_modules/micromark/dist/tokenize/character-reference.js create mode 100644 node_modules/micromark/dist/tokenize/code-fenced.js create mode 100644 node_modules/micromark/dist/tokenize/code-indented.js create mode 100644 node_modules/micromark/dist/tokenize/code-text.js create mode 100644 node_modules/micromark/dist/tokenize/content.js create mode 100644 node_modules/micromark/dist/tokenize/definition.js create mode 100644 node_modules/micromark/dist/tokenize/factory-destination.js create mode 100644 node_modules/micromark/dist/tokenize/factory-label.js create mode 100644 node_modules/micromark/dist/tokenize/factory-space.js create mode 100644 node_modules/micromark/dist/tokenize/factory-title.js create mode 100644 node_modules/micromark/dist/tokenize/factory-whitespace.js create mode 100644 
node_modules/micromark/dist/tokenize/hard-break-escape.js create mode 100644 node_modules/micromark/dist/tokenize/heading-atx.js create mode 100644 node_modules/micromark/dist/tokenize/html-flow.js create mode 100644 node_modules/micromark/dist/tokenize/html-text.js create mode 100644 node_modules/micromark/dist/tokenize/label-end.js create mode 100644 node_modules/micromark/dist/tokenize/label-start-image.js create mode 100644 node_modules/micromark/dist/tokenize/label-start-link.js create mode 100644 node_modules/micromark/dist/tokenize/line-ending.js create mode 100644 node_modules/micromark/dist/tokenize/list.js create mode 100644 node_modules/micromark/dist/tokenize/partial-blank-line.js create mode 100644 node_modules/micromark/dist/tokenize/setext-underline.js create mode 100644 node_modules/micromark/dist/tokenize/thematic-break.js create mode 100644 node_modules/micromark/dist/util/chunked-push.js create mode 100644 node_modules/micromark/dist/util/chunked-splice.js create mode 100644 node_modules/micromark/dist/util/classify-character.js create mode 100644 node_modules/micromark/dist/util/combine-extensions.js create mode 100644 node_modules/micromark/dist/util/combine-html-extensions.js create mode 100644 node_modules/micromark/dist/util/create-tokenizer.js create mode 100644 node_modules/micromark/dist/util/miniflat.js create mode 100644 node_modules/micromark/dist/util/move-point.js create mode 100644 node_modules/micromark/dist/util/normalize-identifier.js create mode 100644 node_modules/micromark/dist/util/normalize-uri.js create mode 100644 node_modules/micromark/dist/util/prefix-size.js create mode 100644 node_modules/micromark/dist/util/regex-check.js create mode 100644 node_modules/micromark/dist/util/resolve-all.js create mode 100644 node_modules/micromark/dist/util/safe-from-int.js create mode 100644 node_modules/micromark/dist/util/serialize-chunks.js create mode 100644 node_modules/micromark/dist/util/shallow.js create mode 100644 node_modules/micromark/dist/util/size-chunks.js create mode 100644 node_modules/micromark/dist/util/slice-chunks.js create mode 100644 node_modules/micromark/dist/util/subtokenize.js create mode 100644 node_modules/micromark/index.d.ts create mode 100644 node_modules/micromark/index.js create mode 100644 node_modules/micromark/index.mjs create mode 100644 node_modules/micromark/lib/character/ascii-alpha.js create mode 100644 node_modules/micromark/lib/character/ascii-alpha.mjs create mode 100644 node_modules/micromark/lib/character/ascii-alphanumeric.js create mode 100644 node_modules/micromark/lib/character/ascii-alphanumeric.mjs create mode 100644 node_modules/micromark/lib/character/ascii-atext.js create mode 100644 node_modules/micromark/lib/character/ascii-atext.mjs create mode 100644 node_modules/micromark/lib/character/ascii-control.js create mode 100644 node_modules/micromark/lib/character/ascii-control.mjs create mode 100644 node_modules/micromark/lib/character/ascii-digit.js create mode 100644 node_modules/micromark/lib/character/ascii-digit.mjs create mode 100644 node_modules/micromark/lib/character/ascii-hex-digit.js create mode 100644 node_modules/micromark/lib/character/ascii-hex-digit.mjs create mode 100644 node_modules/micromark/lib/character/ascii-punctuation.js create mode 100644 node_modules/micromark/lib/character/ascii-punctuation.mjs create mode 100644 node_modules/micromark/lib/character/codes.d.ts create mode 100644 node_modules/micromark/lib/character/codes.js create mode 100644 
node_modules/micromark/lib/character/codes.mjs create mode 100644 node_modules/micromark/lib/character/markdown-line-ending-or-space.js create mode 100644 node_modules/micromark/lib/character/markdown-line-ending-or-space.mjs create mode 100644 node_modules/micromark/lib/character/markdown-line-ending.js create mode 100644 node_modules/micromark/lib/character/markdown-line-ending.mjs create mode 100644 node_modules/micromark/lib/character/markdown-space.js create mode 100644 node_modules/micromark/lib/character/markdown-space.mjs create mode 100644 node_modules/micromark/lib/character/unicode-punctuation.js create mode 100644 node_modules/micromark/lib/character/unicode-punctuation.mjs create mode 100644 node_modules/micromark/lib/character/unicode-whitespace.js create mode 100644 node_modules/micromark/lib/character/unicode-whitespace.mjs create mode 100644 node_modules/micromark/lib/character/values.d.ts create mode 100644 node_modules/micromark/lib/character/values.js create mode 100644 node_modules/micromark/lib/character/values.mjs create mode 100644 node_modules/micromark/lib/compile/html.js create mode 100644 node_modules/micromark/lib/compile/html.mjs create mode 100644 node_modules/micromark/lib/constant/assign.js create mode 100644 node_modules/micromark/lib/constant/assign.mjs create mode 100644 node_modules/micromark/lib/constant/constants.d.ts create mode 100644 node_modules/micromark/lib/constant/constants.js create mode 100644 node_modules/micromark/lib/constant/constants.mjs create mode 100644 node_modules/micromark/lib/constant/from-char-code.js create mode 100644 node_modules/micromark/lib/constant/from-char-code.mjs create mode 100644 node_modules/micromark/lib/constant/has-own-property.js create mode 100644 node_modules/micromark/lib/constant/has-own-property.mjs create mode 100644 node_modules/micromark/lib/constant/html-block-names.js create mode 100644 node_modules/micromark/lib/constant/html-block-names.mjs create mode 100644 node_modules/micromark/lib/constant/html-raw-names.js create mode 100644 node_modules/micromark/lib/constant/html-raw-names.mjs create mode 100644 node_modules/micromark/lib/constant/splice.js create mode 100644 node_modules/micromark/lib/constant/splice.mjs create mode 100644 node_modules/micromark/lib/constant/types.d.ts create mode 100644 node_modules/micromark/lib/constant/types.js create mode 100644 node_modules/micromark/lib/constant/types.mjs create mode 100644 node_modules/micromark/lib/constant/unicode-punctuation-regex.js create mode 100644 node_modules/micromark/lib/constant/unicode-punctuation-regex.mjs create mode 100644 node_modules/micromark/lib/constructs.js create mode 100644 node_modules/micromark/lib/constructs.mjs create mode 100644 node_modules/micromark/lib/index.d.ts create mode 100644 node_modules/micromark/lib/index.js create mode 100644 node_modules/micromark/lib/index.mjs create mode 100644 node_modules/micromark/lib/initialize/content.js create mode 100644 node_modules/micromark/lib/initialize/content.mjs create mode 100644 node_modules/micromark/lib/initialize/document.js create mode 100644 node_modules/micromark/lib/initialize/document.mjs create mode 100644 node_modules/micromark/lib/initialize/flow.js create mode 100644 node_modules/micromark/lib/initialize/flow.mjs create mode 100644 node_modules/micromark/lib/initialize/text.js create mode 100644 node_modules/micromark/lib/initialize/text.mjs create mode 100644 node_modules/micromark/lib/parse.d.ts create mode 100644 node_modules/micromark/lib/parse.js create 
mode 100644 node_modules/micromark/lib/parse.mjs create mode 100644 node_modules/micromark/lib/postprocess.d.ts create mode 100644 node_modules/micromark/lib/postprocess.js create mode 100644 node_modules/micromark/lib/postprocess.mjs create mode 100644 node_modules/micromark/lib/preprocess.d.ts create mode 100644 node_modules/micromark/lib/preprocess.js create mode 100644 node_modules/micromark/lib/preprocess.mjs create mode 100644 node_modules/micromark/lib/shared-types.d.ts create mode 100644 node_modules/micromark/lib/stream.d.ts create mode 100644 node_modules/micromark/lib/stream.js create mode 100644 node_modules/micromark/lib/stream.mjs create mode 100644 node_modules/micromark/lib/tokenize/attention.js create mode 100644 node_modules/micromark/lib/tokenize/attention.mjs create mode 100644 node_modules/micromark/lib/tokenize/autolink.js create mode 100644 node_modules/micromark/lib/tokenize/autolink.mjs create mode 100644 node_modules/micromark/lib/tokenize/block-quote.js create mode 100644 node_modules/micromark/lib/tokenize/block-quote.mjs create mode 100644 node_modules/micromark/lib/tokenize/character-escape.js create mode 100644 node_modules/micromark/lib/tokenize/character-escape.mjs create mode 100644 node_modules/micromark/lib/tokenize/character-reference.js create mode 100644 node_modules/micromark/lib/tokenize/character-reference.mjs create mode 100644 node_modules/micromark/lib/tokenize/code-fenced.js create mode 100644 node_modules/micromark/lib/tokenize/code-fenced.mjs create mode 100644 node_modules/micromark/lib/tokenize/code-indented.js create mode 100644 node_modules/micromark/lib/tokenize/code-indented.mjs create mode 100644 node_modules/micromark/lib/tokenize/code-text.js create mode 100644 node_modules/micromark/lib/tokenize/code-text.mjs create mode 100644 node_modules/micromark/lib/tokenize/content.js create mode 100644 node_modules/micromark/lib/tokenize/content.mjs create mode 100644 node_modules/micromark/lib/tokenize/definition.js create mode 100644 node_modules/micromark/lib/tokenize/definition.mjs create mode 100644 node_modules/micromark/lib/tokenize/factory-destination.js create mode 100644 node_modules/micromark/lib/tokenize/factory-destination.mjs create mode 100644 node_modules/micromark/lib/tokenize/factory-label.js create mode 100644 node_modules/micromark/lib/tokenize/factory-label.mjs create mode 100644 node_modules/micromark/lib/tokenize/factory-space.js create mode 100644 node_modules/micromark/lib/tokenize/factory-space.mjs create mode 100644 node_modules/micromark/lib/tokenize/factory-title.js create mode 100644 node_modules/micromark/lib/tokenize/factory-title.mjs create mode 100644 node_modules/micromark/lib/tokenize/factory-whitespace.js create mode 100644 node_modules/micromark/lib/tokenize/factory-whitespace.mjs create mode 100644 node_modules/micromark/lib/tokenize/hard-break-escape.js create mode 100644 node_modules/micromark/lib/tokenize/hard-break-escape.mjs create mode 100644 node_modules/micromark/lib/tokenize/heading-atx.js create mode 100644 node_modules/micromark/lib/tokenize/heading-atx.mjs create mode 100644 node_modules/micromark/lib/tokenize/html-flow.js create mode 100644 node_modules/micromark/lib/tokenize/html-flow.mjs create mode 100644 node_modules/micromark/lib/tokenize/html-text.js create mode 100644 node_modules/micromark/lib/tokenize/html-text.mjs create mode 100644 node_modules/micromark/lib/tokenize/label-end.js create mode 100644 node_modules/micromark/lib/tokenize/label-end.mjs create mode 100644 
node_modules/micromark/lib/tokenize/label-start-image.js create mode 100644 node_modules/micromark/lib/tokenize/label-start-image.mjs create mode 100644 node_modules/micromark/lib/tokenize/label-start-link.js create mode 100644 node_modules/micromark/lib/tokenize/label-start-link.mjs create mode 100644 node_modules/micromark/lib/tokenize/line-ending.js create mode 100644 node_modules/micromark/lib/tokenize/line-ending.mjs create mode 100644 node_modules/micromark/lib/tokenize/list.js create mode 100644 node_modules/micromark/lib/tokenize/list.mjs create mode 100644 node_modules/micromark/lib/tokenize/partial-blank-line.js create mode 100644 node_modules/micromark/lib/tokenize/partial-blank-line.mjs create mode 100644 node_modules/micromark/lib/tokenize/setext-underline.js create mode 100644 node_modules/micromark/lib/tokenize/setext-underline.mjs create mode 100644 node_modules/micromark/lib/tokenize/thematic-break.js create mode 100644 node_modules/micromark/lib/tokenize/thematic-break.mjs create mode 100644 node_modules/micromark/lib/util/chunked-push.js create mode 100644 node_modules/micromark/lib/util/chunked-push.mjs create mode 100644 node_modules/micromark/lib/util/chunked-splice.js create mode 100644 node_modules/micromark/lib/util/chunked-splice.mjs create mode 100644 node_modules/micromark/lib/util/classify-character.js create mode 100644 node_modules/micromark/lib/util/classify-character.mjs create mode 100644 node_modules/micromark/lib/util/combine-extensions.js create mode 100644 node_modules/micromark/lib/util/combine-extensions.mjs create mode 100644 node_modules/micromark/lib/util/combine-html-extensions.js create mode 100644 node_modules/micromark/lib/util/combine-html-extensions.mjs create mode 100644 node_modules/micromark/lib/util/create-tokenizer.js create mode 100644 node_modules/micromark/lib/util/create-tokenizer.mjs create mode 100644 node_modules/micromark/lib/util/miniflat.js create mode 100644 node_modules/micromark/lib/util/miniflat.mjs create mode 100644 node_modules/micromark/lib/util/move-point.js create mode 100644 node_modules/micromark/lib/util/move-point.mjs create mode 100644 node_modules/micromark/lib/util/normalize-identifier.js create mode 100644 node_modules/micromark/lib/util/normalize-identifier.mjs create mode 100644 node_modules/micromark/lib/util/normalize-uri.js create mode 100644 node_modules/micromark/lib/util/normalize-uri.mjs create mode 100644 node_modules/micromark/lib/util/prefix-size.js create mode 100644 node_modules/micromark/lib/util/prefix-size.mjs create mode 100644 node_modules/micromark/lib/util/regex-check.js create mode 100644 node_modules/micromark/lib/util/regex-check.mjs create mode 100644 node_modules/micromark/lib/util/resolve-all.js create mode 100644 node_modules/micromark/lib/util/resolve-all.mjs create mode 100644 node_modules/micromark/lib/util/safe-from-int.js create mode 100644 node_modules/micromark/lib/util/safe-from-int.mjs create mode 100644 node_modules/micromark/lib/util/serialize-chunks.js create mode 100644 node_modules/micromark/lib/util/serialize-chunks.mjs create mode 100644 node_modules/micromark/lib/util/shallow.js create mode 100644 node_modules/micromark/lib/util/shallow.mjs create mode 100644 node_modules/micromark/lib/util/size-chunks.js create mode 100644 node_modules/micromark/lib/util/size-chunks.mjs create mode 100644 node_modules/micromark/lib/util/slice-chunks.js create mode 100644 node_modules/micromark/lib/util/slice-chunks.mjs create mode 100644 
node_modules/micromark/lib/util/subtokenize.js create mode 100644 node_modules/micromark/lib/util/subtokenize.mjs create mode 100644 node_modules/micromark/license create mode 100644 node_modules/micromark/package.json create mode 100644 node_modules/micromark/readme.md create mode 100644 node_modules/micromark/stream.d.ts create mode 100644 node_modules/micromark/stream.js create mode 100644 node_modules/micromark/stream.mjs create mode 100644 node_modules/parse-entities/types/index.d.ts create mode 100644 node_modules/remark-footnotes/index.js create mode 100644 node_modules/remark-footnotes/license create mode 100644 node_modules/remark-footnotes/package.json create mode 100644 node_modules/remark-footnotes/readme.md create mode 100644 node_modules/remark-footnotes/types/index.d.ts delete mode 100644 node_modules/remark-frontmatter/lib/compile.js delete mode 100644 node_modules/remark-frontmatter/lib/fence.js delete mode 100644 node_modules/remark-frontmatter/lib/parse.js create mode 100644 node_modules/remark-frontmatter/types/index.d.ts create mode 100644 node_modules/remark-gfm/index.js create mode 100644 node_modules/remark-gfm/license create mode 100644 node_modules/remark-gfm/package.json create mode 100644 node_modules/remark-gfm/readme.md create mode 100644 node_modules/remark-gfm/types/index.d.ts delete mode 100644 node_modules/remark-parse/lib/block-elements.json delete mode 100644 node_modules/remark-parse/lib/decode.js delete mode 100644 node_modules/remark-parse/lib/defaults.js delete mode 100644 node_modules/remark-parse/lib/locate/break.js delete mode 100644 node_modules/remark-parse/lib/locate/code-inline.js delete mode 100644 node_modules/remark-parse/lib/locate/delete.js delete mode 100644 node_modules/remark-parse/lib/locate/emphasis.js delete mode 100644 node_modules/remark-parse/lib/locate/escape.js delete mode 100644 node_modules/remark-parse/lib/locate/link.js delete mode 100644 node_modules/remark-parse/lib/locate/strong.js delete mode 100644 node_modules/remark-parse/lib/locate/tag.js delete mode 100644 node_modules/remark-parse/lib/locate/url.js delete mode 100644 node_modules/remark-parse/lib/parse.js delete mode 100644 node_modules/remark-parse/lib/parser.js delete mode 100644 node_modules/remark-parse/lib/set-options.js delete mode 100644 node_modules/remark-parse/lib/tokenize/auto-link.js delete mode 100644 node_modules/remark-parse/lib/tokenize/blockquote.js delete mode 100644 node_modules/remark-parse/lib/tokenize/break.js delete mode 100644 node_modules/remark-parse/lib/tokenize/code-fenced.js delete mode 100644 node_modules/remark-parse/lib/tokenize/code-indented.js delete mode 100644 node_modules/remark-parse/lib/tokenize/code-inline.js delete mode 100644 node_modules/remark-parse/lib/tokenize/definition.js delete mode 100644 node_modules/remark-parse/lib/tokenize/delete.js delete mode 100644 node_modules/remark-parse/lib/tokenize/emphasis.js delete mode 100644 node_modules/remark-parse/lib/tokenize/escape.js delete mode 100644 node_modules/remark-parse/lib/tokenize/footnote-definition.js delete mode 100644 node_modules/remark-parse/lib/tokenize/heading-atx.js delete mode 100644 node_modules/remark-parse/lib/tokenize/heading-setext.js delete mode 100644 node_modules/remark-parse/lib/tokenize/html-block.js delete mode 100644 node_modules/remark-parse/lib/tokenize/html-inline.js delete mode 100644 node_modules/remark-parse/lib/tokenize/link.js delete mode 100644 node_modules/remark-parse/lib/tokenize/list.js delete mode 100644 
node_modules/remark-parse/lib/tokenize/newline.js delete mode 100644 node_modules/remark-parse/lib/tokenize/paragraph.js delete mode 100644 node_modules/remark-parse/lib/tokenize/reference.js delete mode 100644 node_modules/remark-parse/lib/tokenize/strong.js delete mode 100644 node_modules/remark-parse/lib/tokenize/table.js delete mode 100644 node_modules/remark-parse/lib/tokenize/text.js delete mode 100644 node_modules/remark-parse/lib/tokenize/thematic-break.js delete mode 100644 node_modules/remark-parse/lib/tokenize/url.js delete mode 100644 node_modules/remark-parse/lib/tokenizer.js delete mode 100644 node_modules/remark-parse/lib/unescape.js delete mode 100644 node_modules/remark-parse/lib/util/get-indentation.js delete mode 100644 node_modules/remark-parse/lib/util/html.js delete mode 100644 node_modules/remark-parse/lib/util/interrupt.js delete mode 100644 node_modules/remark-parse/lib/util/normalize.js delete mode 100644 node_modules/remark-parse/lib/util/remove-indentation.js create mode 100644 node_modules/remark-parse/types/index.d.ts delete mode 100755 node_modules/replace-ext/LICENSE delete mode 100644 node_modules/replace-ext/README.md delete mode 100644 node_modules/replace-ext/index.js delete mode 100644 node_modules/replace-ext/package.json delete mode 100644 node_modules/state-toggle/index.js delete mode 100644 node_modules/state-toggle/readme.md delete mode 100644 node_modules/structured-source/README.md delete mode 100644 node_modules/structured-source/lib/index.js delete mode 100644 node_modules/structured-source/lib/structured-source.js delete mode 100644 node_modules/structured-source/package.json delete mode 100644 node_modules/trim-trailing-lines/index.js delete mode 100644 node_modules/trim-trailing-lines/readme.md delete mode 100644 node_modules/trim/.npmignore delete mode 100644 node_modules/trim/History.md delete mode 100644 node_modules/trim/Makefile delete mode 100644 node_modules/trim/Readme.md delete mode 100644 node_modules/trim/component.json delete mode 100644 node_modules/trim/index.js delete mode 100644 node_modules/trim/package.json delete mode 100644 node_modules/unherit/index.js delete mode 100644 node_modules/unherit/license delete mode 100644 node_modules/unherit/package.json delete mode 100644 node_modules/unherit/readme.md create mode 100644 node_modules/unified/changelog.md create mode 100644 node_modules/unified/types/ts3.4/index.d.ts create mode 100644 node_modules/unified/types/ts4.0/index.d.ts create mode 100644 node_modules/unist-util-is/convert.d.ts create mode 100644 node_modules/unist-util-is/index.d.ts delete mode 100644 node_modules/unist-util-remove-position/index.js delete mode 100644 node_modules/unist-util-remove-position/package.json delete mode 100644 node_modules/unist-util-remove-position/readme.md create mode 100644 node_modules/unist-util-stringify-position/types/index.d.ts create mode 100644 node_modules/unist-util-visit-parents/color.browser.js create mode 100644 node_modules/unist-util-visit-parents/color.js create mode 100644 node_modules/unist-util-visit-parents/types/index.d.ts delete mode 100644 node_modules/unist-util-visit/index.js delete mode 100644 node_modules/unist-util-visit/package.json delete mode 100644 node_modules/unist-util-visit/readme.md delete mode 100644 node_modules/vfile-location/index.js delete mode 100644 node_modules/vfile-location/license delete mode 100644 node_modules/vfile-location/package.json delete mode 100644 node_modules/vfile-location/readme.md create mode 100644 
node_modules/vfile-message/types/index.d.ts create mode 100644 node_modules/vfile/changelog.md create mode 100644 node_modules/vfile/lib/core.js create mode 100644 node_modules/vfile/lib/index.js create mode 100644 node_modules/vfile/lib/minpath.browser.js create mode 100644 node_modules/vfile/lib/minpath.js create mode 100644 node_modules/vfile/lib/minproc.browser.js create mode 100644 node_modules/vfile/lib/minproc.js create mode 100644 node_modules/vfile/types/index.d.ts delete mode 100644 node_modules/x-is-string/.npmignore delete mode 100644 node_modules/x-is-string/.travis.yml delete mode 100644 node_modules/x-is-string/LICENCE delete mode 100644 node_modules/x-is-string/README.md delete mode 100644 node_modules/x-is-string/index.js delete mode 100644 node_modules/x-is-string/package.json delete mode 100644 node_modules/x-is-string/test/index.js delete mode 100644 node_modules/xtend/.jshintrc delete mode 100644 node_modules/xtend/LICENSE delete mode 100644 node_modules/xtend/README.md delete mode 100644 node_modules/xtend/immutable.js delete mode 100644 node_modules/xtend/mutable.js delete mode 100644 node_modules/xtend/package.json delete mode 100644 node_modules/xtend/test.js create mode 100644 node_modules/zwitch/index.js rename node_modules/{is-whitespace-character => zwitch}/license (100%) rename node_modules/{state-toggle => zwitch}/package.json (74%) create mode 100644 node_modules/zwitch/readme.md diff --git a/build.json b/build.json index 206d7f73..7564bd3e 100644 --- a/build.json +++ b/build.json @@ -1 +1 @@ -{"owner":"technote-space","repo":"toc-generator","sha":"742be1651811aaecc882afe21cc208728ae74e74","ref":"refs/tags/test/v4.1.3.821895890","tagName":"test/v4.1.3.821895890","branch":"gh-actions","tags":["test/v4.1.3.821895890","test/v4.1.3","test/v4.1","test/v4"],"updated_at":"2021-05-07T23:35:18.825Z"} \ No newline at end of file +{"owner":"technote-space","repo":"toc-generator","sha":"FETCH_HEAD","ref":"refs/heads/master","tagName":"test/v4.1.3","branch":"gh-actions","tags":["test/v4.1.3","test/v4.1","test/v4"],"updated_at":"2021-06-22T16:39:48.842Z"} \ No newline at end of file diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity index fb2e04bd..be13b378 100644 --- a/node_modules/.yarn-integrity +++ b/node_modules/.yarn-integrity @@ -1,5 +1,5 @@ { - "systemParams": "linux-x64-83", + "systemParams": "darwin-x64-88", "modulesFolders": [ "node_modules" ], @@ -8,64 +8,72 @@ ], "linkedModules": [], "topLevelPatterns": [ - "@actions/core@^1.2.7", - "@actions/github@^4.0.0", - "@commitlint/cli@^12.1.1", - "@commitlint/config-conventional@^12.1.1", - "@technote-space/doctoc@^2.4.5", - "@technote-space/github-action-helper@^5.2.7", - "@technote-space/github-action-log-helper@^0.1.24", - "@technote-space/github-action-pr-helper@^2.2.6", - "@technote-space/github-action-test-helper@^0.7.10", + "@actions/core@^1.4.0", + "@actions/github@^5.0.0", + "@commitlint/cli@^12.1.4", + "@commitlint/config-conventional@^12.1.4", + "@technote-space/doctoc@^2.4.7", + "@technote-space/github-action-helper@^5.2.9", + "@technote-space/github-action-log-helper@^0.1.28", + "@technote-space/github-action-pr-helper@^2.2.7", + "@technote-space/github-action-test-helper@^0.7.13", "@technote-space/release-github-actions-cli@^1.8.5", "@types/jest@^26.0.23", - "@types/node@^15.0.2", - "@typescript-eslint/eslint-plugin@^4.22.1", - "@typescript-eslint/parser@^4.22.1", - "eslint@^7.25.0", + "@types/node@^15.12.4", + "@typescript-eslint/eslint-plugin@^4.28.0", + 
"@typescript-eslint/parser@^4.28.0", + "eslint@^7.29.0", "fast-glob@^3.2.5", "husky@^6.0.0", - "jest-circus@^26.6.3", - "jest@^26.6.3", + "jest-circus@^27.0.5", + "jest@^27.0.5", "lint-staged@^11.0.0", - "nock@^13.0.11", + "nock@^13.1.0", "pinst@^2.1.6", - "ts-jest@^26.5.6", - "typescript@^4.2.4" + "ts-jest@^27.0.3", + "typescript@^4.3.4" ], "lockfileEntries": { - "@actions/core@^1.2.7": "https://registry.yarnpkg.com/@actions/core/-/core-1.2.7.tgz#594f8c45b213f0146e4be7eda8ae5cf4e198e5ab", - "@actions/github@^4.0.0": "https://registry.yarnpkg.com/@actions/github/-/github-4.0.0.tgz#d520483151a2bf5d2dc9cd0f20f9ac3a2e458816", - "@actions/http-client@^1.0.8": "https://registry.yarnpkg.com/@actions/http-client/-/http-client-1.0.11.tgz#c58b12e9aa8b159ee39e7dd6cbd0e91d905633c0", + "@actions/core@^1.2.7": "https://registry.yarnpkg.com/@actions/core/-/core-1.4.0.tgz#cf2e6ee317e314b03886adfeb20e448d50d6e524", + "@actions/core@^1.4.0": "https://registry.yarnpkg.com/@actions/core/-/core-1.4.0.tgz#cf2e6ee317e314b03886adfeb20e448d50d6e524", + "@actions/github@^5.0.0": "https://registry.yarnpkg.com/@actions/github/-/github-5.0.0.tgz#1754127976c50bd88b2e905f10d204d76d1472f8", + "@actions/http-client@^1.0.11": "https://registry.yarnpkg.com/@actions/http-client/-/http-client-1.0.11.tgz#c58b12e9aa8b159ee39e7dd6cbd0e91d905633c0", "@babel/code-frame@7.12.11": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f", - "@babel/code-frame@^7.0.0": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.13.tgz#dcfc826beef65e75c50e21d3837d7d95798dd658", - "@babel/code-frame@^7.12.13": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.13.tgz#dcfc826beef65e75c50e21d3837d7d95798dd658", - "@babel/compat-data@^7.13.15": "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.0.tgz#a901128bce2ad02565df95e6ecbf195cf9465919", - "@babel/core@^7.1.0": "https://registry.yarnpkg.com/@babel/core/-/core-7.14.0.tgz#47299ff3ec8d111b493f1a9d04bf88c04e728d88", - "@babel/core@^7.7.5": "https://registry.yarnpkg.com/@babel/core/-/core-7.14.0.tgz#47299ff3ec8d111b493f1a9d04bf88c04e728d88", - "@babel/generator@^7.14.0": "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.1.tgz#1f99331babd65700183628da186f36f63d615c93", - "@babel/helper-compilation-targets@^7.13.16": "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.13.16.tgz#6e91dccf15e3f43e5556dffe32d860109887563c", - "@babel/helper-function-name@^7.12.13": "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz#93ad656db3c3c2232559fd7b2c3dbdcbe0eb377a", - "@babel/helper-get-function-arity@^7.12.13": "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz#bc63451d403a3b3082b97e1d8b3fe5bd4091e583", - "@babel/helper-member-expression-to-functions@^7.13.12": "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.13.12.tgz#dfe368f26d426a07299d8d6513821768216e6d72", - "@babel/helper-module-imports@^7.13.12": "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.13.12.tgz#c6a369a6f3621cb25da014078684da9196b61977", - "@babel/helper-module-transforms@^7.14.0": "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.14.0.tgz#8fcf78be220156f22633ee204ea81f73f826a8ad", - "@babel/helper-optimise-call-expression@^7.12.13": 
"https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz#5c02d171b4c8615b1e7163f888c1c81c30a2aaea", - "@babel/helper-plugin-utils@^7.0.0": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz#806526ce125aed03373bc416a828321e3a6a33af", - "@babel/helper-plugin-utils@^7.10.4": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz#806526ce125aed03373bc416a828321e3a6a33af", - "@babel/helper-plugin-utils@^7.12.13": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz#806526ce125aed03373bc416a828321e3a6a33af", - "@babel/helper-plugin-utils@^7.8.0": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz#806526ce125aed03373bc416a828321e3a6a33af", - "@babel/helper-replace-supers@^7.13.12": "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.13.12.tgz#6442f4c1ad912502481a564a7386de0c77ff3804", - "@babel/helper-simple-access@^7.13.12": "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.13.12.tgz#dd6c538afb61819d205a012c31792a39c7a5eaf6", - "@babel/helper-split-export-declaration@^7.12.13": "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz#e9430be00baf3e88b0e13e6f9d4eaf2136372b05", - "@babel/helper-validator-identifier@^7.14.0": "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz#d26cad8a47c65286b15df1547319a5d0bcf27288", - "@babel/helper-validator-option@^7.12.17": "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.12.17.tgz#d1fbf012e1a79b7eebbfdc6d270baaf8d9eb9831", - "@babel/helpers@^7.14.0": "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.14.0.tgz#ea9b6be9478a13d6f961dbb5f36bf75e2f3b8f62", - "@babel/highlight@^7.10.4": "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.0.tgz#3197e375711ef6bf834e67d0daec88e4f46113cf", - "@babel/highlight@^7.12.13": "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.0.tgz#3197e375711ef6bf834e67d0daec88e4f46113cf", - "@babel/parser@^7.1.0": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.1.tgz#1bd644b5db3f5797c4479d89ec1817fe02b84c47", - "@babel/parser@^7.12.13": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.1.tgz#1bd644b5db3f5797c4479d89ec1817fe02b84c47", - "@babel/parser@^7.14.0": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.1.tgz#1bd644b5db3f5797c4479d89ec1817fe02b84c47", + "@babel/code-frame@^7.0.0": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb", + "@babel/code-frame@^7.12.13": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb", + "@babel/code-frame@^7.14.5": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb", + "@babel/compat-data@^7.14.5": "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.7.tgz#7b047d7a3a89a67d2258dc61f604f098f1bc7e08", + "@babel/core@^7.1.0": "https://registry.yarnpkg.com/@babel/core/-/core-7.14.6.tgz#e0814ec1a950032ff16c13a2721de39a8416fcab", + "@babel/core@^7.7.2": "https://registry.yarnpkg.com/@babel/core/-/core-7.14.6.tgz#e0814ec1a950032ff16c13a2721de39a8416fcab", + "@babel/core@^7.7.5": 
"https://registry.yarnpkg.com/@babel/core/-/core-7.14.6.tgz#e0814ec1a950032ff16c13a2721de39a8416fcab", + "@babel/generator@^7.14.5": "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.5.tgz#848d7b9f031caca9d0cd0af01b063f226f52d785", + "@babel/generator@^7.7.2": "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.5.tgz#848d7b9f031caca9d0cd0af01b063f226f52d785", + "@babel/helper-compilation-targets@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.14.5.tgz#7a99c5d0967911e972fe2c3411f7d5b498498ecf", + "@babel/helper-function-name@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.14.5.tgz#89e2c474972f15d8e233b52ee8c480e2cfcd50c4", + "@babel/helper-get-function-arity@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.14.5.tgz#25fbfa579b0937eee1f3b805ece4ce398c431815", + "@babel/helper-hoist-variables@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.14.5.tgz#e0dd27c33a78e577d7c8884916a3e7ef1f7c7f8d", + "@babel/helper-member-expression-to-functions@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.14.7.tgz#97e56244beb94211fe277bd818e3a329c66f7970", + "@babel/helper-module-imports@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.14.5.tgz#6d1a44df6a38c957aa7c312da076429f11b422f3", + "@babel/helper-module-transforms@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.14.5.tgz#7de42f10d789b423eb902ebd24031ca77cb1e10e", + "@babel/helper-optimise-call-expression@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.14.5.tgz#f27395a8619e0665b3f0364cddb41c25d71b499c", + "@babel/helper-plugin-utils@^7.0.0": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz#5ac822ce97eec46741ab70a517971e443a70c5a9", + "@babel/helper-plugin-utils@^7.10.4": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz#5ac822ce97eec46741ab70a517971e443a70c5a9", + "@babel/helper-plugin-utils@^7.12.13": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz#5ac822ce97eec46741ab70a517971e443a70c5a9", + "@babel/helper-plugin-utils@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz#5ac822ce97eec46741ab70a517971e443a70c5a9", + "@babel/helper-plugin-utils@^7.8.0": "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz#5ac822ce97eec46741ab70a517971e443a70c5a9", + "@babel/helper-replace-supers@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.14.5.tgz#0ecc0b03c41cd567b4024ea016134c28414abb94", + "@babel/helper-simple-access@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.14.5.tgz#66ea85cf53ba0b4e588ba77fc813f53abcaa41c4", + "@babel/helper-split-export-declaration@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.14.5.tgz#22b23a54ef51c2b7605d851930c1976dd0bc693a", + "@babel/helper-validator-identifier@^7.14.5": 
"https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz#d0f0e277c512e0c938277faa85a3968c9a44c0e8", + "@babel/helper-validator-option@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.14.5.tgz#6e72a1fff18d5dfcb878e1e62f1a021c4b72d5a3", + "@babel/helpers@^7.14.6": "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.14.6.tgz#5b58306b95f1b47e2a0199434fa8658fa6c21635", + "@babel/highlight@^7.10.4": "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.5.tgz#6861a52f03966405001f6aa534a01a24d99e8cd9", + "@babel/highlight@^7.14.5": "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.5.tgz#6861a52f03966405001f6aa534a01a24d99e8cd9", + "@babel/parser@^7.1.0": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595", + "@babel/parser@^7.14.5": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595", + "@babel/parser@^7.14.6": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595", + "@babel/parser@^7.14.7": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595", + "@babel/parser@^7.7.2": "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595", "@babel/plugin-syntax-async-generators@^7.8.4": "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d", "@babel/plugin-syntax-bigint@^7.8.3": "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea", "@babel/plugin-syntax-class-properties@^7.8.3": "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10", @@ -77,89 +85,89 @@ "@babel/plugin-syntax-object-rest-spread@^7.8.3": "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871", "@babel/plugin-syntax-optional-catch-binding@^7.8.3": "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1", "@babel/plugin-syntax-optional-chaining@^7.8.3": "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a", - "@babel/plugin-syntax-top-level-await@^7.8.3": "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.12.13.tgz#c5f0fa6e249f5b739727f923540cf7a806130178", - "@babel/template@^7.12.13": "https://registry.yarnpkg.com/@babel/template/-/template-7.12.13.tgz#530265be8a2589dbb37523844c5bcb55947fb327", - "@babel/template@^7.3.3": "https://registry.yarnpkg.com/@babel/template/-/template-7.12.13.tgz#530265be8a2589dbb37523844c5bcb55947fb327", - "@babel/traverse@^7.1.0": "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.0.tgz#cea0dc8ae7e2b1dec65f512f39f3483e8cc95aef", - "@babel/traverse@^7.13.0": "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.0.tgz#cea0dc8ae7e2b1dec65f512f39f3483e8cc95aef", - "@babel/traverse@^7.14.0": 
"https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.0.tgz#cea0dc8ae7e2b1dec65f512f39f3483e8cc95aef", - "@babel/types@^7.0.0": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.1.tgz#095bd12f1c08ab63eff6e8f7745fa7c9cc15a9db", - "@babel/types@^7.12.13": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.1.tgz#095bd12f1c08ab63eff6e8f7745fa7c9cc15a9db", - "@babel/types@^7.13.12": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.1.tgz#095bd12f1c08ab63eff6e8f7745fa7c9cc15a9db", - "@babel/types@^7.14.0": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.1.tgz#095bd12f1c08ab63eff6e8f7745fa7c9cc15a9db", - "@babel/types@^7.14.1": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.1.tgz#095bd12f1c08ab63eff6e8f7745fa7c9cc15a9db", - "@babel/types@^7.3.0": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.1.tgz#095bd12f1c08ab63eff6e8f7745fa7c9cc15a9db", - "@babel/types@^7.3.3": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.1.tgz#095bd12f1c08ab63eff6e8f7745fa7c9cc15a9db", + "@babel/plugin-syntax-top-level-await@^7.8.3": "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c", + "@babel/plugin-syntax-typescript@^7.7.2": "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.14.5.tgz#b82c6ce471b165b5ce420cf92914d6fb46225716", + "@babel/template@^7.14.5": "https://registry.yarnpkg.com/@babel/template/-/template-7.14.5.tgz#a9bc9d8b33354ff6e55a9c60d1109200a68974f4", + "@babel/template@^7.3.3": "https://registry.yarnpkg.com/@babel/template/-/template-7.14.5.tgz#a9bc9d8b33354ff6e55a9c60d1109200a68974f4", + "@babel/traverse@^7.1.0": "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.7.tgz#64007c9774cfdc3abd23b0780bc18a3ce3631753", + "@babel/traverse@^7.14.5": "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.7.tgz#64007c9774cfdc3abd23b0780bc18a3ce3631753", + "@babel/traverse@^7.7.2": "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.7.tgz#64007c9774cfdc3abd23b0780bc18a3ce3631753", + "@babel/types@^7.0.0": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.5.tgz#3bb997ba829a2104cedb20689c4a5b8121d383ff", + "@babel/types@^7.14.5": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.5.tgz#3bb997ba829a2104cedb20689c4a5b8121d383ff", + "@babel/types@^7.3.0": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.5.tgz#3bb997ba829a2104cedb20689c4a5b8121d383ff", + "@babel/types@^7.3.3": "https://registry.yarnpkg.com/@babel/types/-/types-7.14.5.tgz#3bb997ba829a2104cedb20689c4a5b8121d383ff", "@bcoe/v8-coverage@^0.2.3": "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39", - "@cnakazawa/watch@^1.0.3": "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.4.tgz#f864ae85004d0fcab6f50be9141c4da368d1656a", - "@commitlint/cli@^12.1.1": "https://registry.yarnpkg.com/@commitlint/cli/-/cli-12.1.1.tgz#740370e557a8a17f415052821cdd5276ecb0ab98", - "@commitlint/config-conventional@^12.1.1": "https://registry.yarnpkg.com/@commitlint/config-conventional/-/config-conventional-12.1.1.tgz#73dd3b1a7912138420d248f334f15c94c250bc9e", - "@commitlint/ensure@^12.1.1": "https://registry.yarnpkg.com/@commitlint/ensure/-/ensure-12.1.1.tgz#bcefc85f7f8a41bb31f67d7a8966e322b47a6e43", - "@commitlint/execute-rule@^12.1.1": "https://registry.yarnpkg.com/@commitlint/execute-rule/-/execute-rule-12.1.1.tgz#8aad1d46fb78b3199e4ae36debdc93570bf765ea", - 
"@commitlint/format@^12.1.1": "https://registry.yarnpkg.com/@commitlint/format/-/format-12.1.1.tgz#a6b14f8605171374eecc2c463098d63c127ab7df", - "@commitlint/is-ignored@^12.1.1": "https://registry.yarnpkg.com/@commitlint/is-ignored/-/is-ignored-12.1.1.tgz#6075a5cd2dcda7b6ec93322f5dbe2142cfbb3248", - "@commitlint/lint@^12.1.1": "https://registry.yarnpkg.com/@commitlint/lint/-/lint-12.1.1.tgz#cdd898af6eadba8f9e71d7f1255b5a479a757078", - "@commitlint/load@^12.1.1": "https://registry.yarnpkg.com/@commitlint/load/-/load-12.1.1.tgz#5a7fb8be11e520931d1237c5e8dc401b7cc9c6c1", - "@commitlint/message@^12.1.1": "https://registry.yarnpkg.com/@commitlint/message/-/message-12.1.1.tgz#56eb1dbb561e85e9295380a46ff3b09bc93cac65", - "@commitlint/parse@^12.1.1": "https://registry.yarnpkg.com/@commitlint/parse/-/parse-12.1.1.tgz#3e49d6dc113d59cf266af0db99e320e933108c56", - "@commitlint/read@^12.1.1": "https://registry.yarnpkg.com/@commitlint/read/-/read-12.1.1.tgz#22a2d7fd1eab5e38b9b262311af28ac42f9a5097", - "@commitlint/resolve-extends@^12.1.1": "https://registry.yarnpkg.com/@commitlint/resolve-extends/-/resolve-extends-12.1.1.tgz#80a78b0940775d17888dd2985b52f93d93e0a885", - "@commitlint/rules@^12.1.1": "https://registry.yarnpkg.com/@commitlint/rules/-/rules-12.1.1.tgz#d59182a837d2addf301a3a4ef83316ae7e70248f", - "@commitlint/to-lines@^12.1.1": "https://registry.yarnpkg.com/@commitlint/to-lines/-/to-lines-12.1.1.tgz#40fbed1767d637249ce49b311a51909d8361ecf8", - "@commitlint/top-level@^12.1.1": "https://registry.yarnpkg.com/@commitlint/top-level/-/top-level-12.1.1.tgz#228df8fc36b6d7ea7ad149badfb6ef53dbc7001d", - "@commitlint/types@^12.1.1": "https://registry.yarnpkg.com/@commitlint/types/-/types-12.1.1.tgz#8e651f6af0171cd4f8d464c6c37a7cf63ee071bd", - "@eslint/eslintrc@^0.4.0": "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.0.tgz#99cc0a0584d72f1df38b900fb062ba995f395547", + "@commitlint/cli@^12.1.4": "https://registry.yarnpkg.com/@commitlint/cli/-/cli-12.1.4.tgz#af4d9dd3c0122c7b39a61fa1cd2abbad0422dbe0", + "@commitlint/config-conventional@^12.1.4": "https://registry.yarnpkg.com/@commitlint/config-conventional/-/config-conventional-12.1.4.tgz#95bbab622f117a8a3e49f95917b08655040c66a8", + "@commitlint/ensure@^12.1.4": "https://registry.yarnpkg.com/@commitlint/ensure/-/ensure-12.1.4.tgz#287ae2dcc5ccb086e749705b1bd9bdb99773056f", + "@commitlint/execute-rule@^12.1.4": "https://registry.yarnpkg.com/@commitlint/execute-rule/-/execute-rule-12.1.4.tgz#9973b02e9779adbf1522ae9ac207a4815ec73de1", + "@commitlint/format@^12.1.4": "https://registry.yarnpkg.com/@commitlint/format/-/format-12.1.4.tgz#db2d46418a6ae57c90e5f7f65dff46f0265d9f24", + "@commitlint/is-ignored@^12.1.4": "https://registry.yarnpkg.com/@commitlint/is-ignored/-/is-ignored-12.1.4.tgz#4c430bc3b361aa9be5cd4ddb252c1559870ea7bc", + "@commitlint/lint@^12.1.4": "https://registry.yarnpkg.com/@commitlint/lint/-/lint-12.1.4.tgz#856b7fd2b2e6367b836cb84a12f1c1b3c0e40d22", + "@commitlint/load@^12.1.4": "https://registry.yarnpkg.com/@commitlint/load/-/load-12.1.4.tgz#e3c2dbc0e7d8d928f57a6878bd7219909fc0acab", + "@commitlint/message@^12.1.4": "https://registry.yarnpkg.com/@commitlint/message/-/message-12.1.4.tgz#3895edcc0709deca5945f3d55f5ea95a9f1f446d", + "@commitlint/parse@^12.1.4": "https://registry.yarnpkg.com/@commitlint/parse/-/parse-12.1.4.tgz#ba03d54d24ef84f6fd2ff31c5e9998b22d7d0aa1", + "@commitlint/read@^12.1.4": "https://registry.yarnpkg.com/@commitlint/read/-/read-12.1.4.tgz#552fda42ef185d5b578beb6f626a5f8b282de3a6", + 
"@commitlint/resolve-extends@^12.1.4": "https://registry.yarnpkg.com/@commitlint/resolve-extends/-/resolve-extends-12.1.4.tgz#e758ed7dcdf942618b9f603a7c28a640f6a0802a", + "@commitlint/rules@^12.1.4": "https://registry.yarnpkg.com/@commitlint/rules/-/rules-12.1.4.tgz#0e141b08caa3d7bdc48aa784baa8baff3efd64db", + "@commitlint/to-lines@^12.1.4": "https://registry.yarnpkg.com/@commitlint/to-lines/-/to-lines-12.1.4.tgz#caa582dbf121f377a0588bb64e25c4854843cd25", + "@commitlint/top-level@^12.1.4": "https://registry.yarnpkg.com/@commitlint/top-level/-/top-level-12.1.4.tgz#96d5c715bfc1bdf86dfcf11b67fc2cf7658c7a6e", + "@commitlint/types@^12.1.4": "https://registry.yarnpkg.com/@commitlint/types/-/types-12.1.4.tgz#9618a5dc8991fb58e6de6ed89d7bf712fa74ba7e", + "@eslint/eslintrc@^0.4.2": "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.2.tgz#f63d0ef06f5c0c57d76c4ab5f63d3835c51b0179", "@istanbuljs/load-nyc-config@^1.0.0": "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced", "@istanbuljs/schema@^0.1.2": "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98", - "@jest/console@^26.6.2": "https://registry.yarnpkg.com/@jest/console/-/console-26.6.2.tgz#4e04bc464014358b03ab4937805ee36a0aeb98f2", - "@jest/core@^26.6.3": "https://registry.yarnpkg.com/@jest/core/-/core-26.6.3.tgz#7639fcb3833d748a4656ada54bde193051e45fad", - "@jest/environment@^26.6.2": "https://registry.yarnpkg.com/@jest/environment/-/environment-26.6.2.tgz#ba364cc72e221e79cc8f0a99555bf5d7577cf92c", - "@jest/fake-timers@^26.6.2": "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-26.6.2.tgz#459c329bcf70cee4af4d7e3f3e67848123535aad", - "@jest/globals@^26.6.2": "https://registry.yarnpkg.com/@jest/globals/-/globals-26.6.2.tgz#5b613b78a1aa2655ae908eba638cc96a20df720a", - "@jest/reporters@^26.6.2": "https://registry.yarnpkg.com/@jest/reporters/-/reporters-26.6.2.tgz#1f518b99637a5f18307bd3ecf9275f6882a667f6", - "@jest/source-map@^26.6.2": "https://registry.yarnpkg.com/@jest/source-map/-/source-map-26.6.2.tgz#29af5e1e2e324cafccc936f218309f54ab69d535", - "@jest/test-result@^26.6.2": "https://registry.yarnpkg.com/@jest/test-result/-/test-result-26.6.2.tgz#55da58b62df134576cc95476efa5f7949e3f5f18", - "@jest/test-sequencer@^26.6.3": "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-26.6.3.tgz#98e8a45100863886d074205e8ffdc5a7eb582b17", - "@jest/transform@^26.6.2": "https://registry.yarnpkg.com/@jest/transform/-/transform-26.6.2.tgz#5ac57c5fa1ad17b2aae83e73e45813894dcf2e4b", + "@jest/console@^27.0.2": "https://registry.yarnpkg.com/@jest/console/-/console-27.0.2.tgz#b8eeff8f21ac51d224c851e1729d2630c18631e6", + "@jest/core@^27.0.5": "https://registry.yarnpkg.com/@jest/core/-/core-27.0.5.tgz#59e9e69e7374d65dbb22e3fc1bd52e80991eae72", + "@jest/environment@^27.0.5": "https://registry.yarnpkg.com/@jest/environment/-/environment-27.0.5.tgz#a294ad4acda2e250f789fb98dc667aad33d3adc9", + "@jest/fake-timers@^27.0.5": "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.0.5.tgz#304d5aedadf4c75cff3696995460b39d6c6e72f6", + "@jest/globals@^27.0.5": "https://registry.yarnpkg.com/@jest/globals/-/globals-27.0.5.tgz#f63b8bfa6ea3716f8df50f6a604b5c15b36ffd20", + "@jest/reporters@^27.0.5": "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.0.5.tgz#cd730b77d9667b8ff700ad66d4edc293bb09716a", + "@jest/source-map@^27.0.1": 
"https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.0.1.tgz#2afbf73ddbaddcb920a8e62d0238a0a9e0a8d3e4", + "@jest/test-result@^27.0.2": "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.0.2.tgz#0451049e32ceb609b636004ccc27c8fa22263f10", + "@jest/test-sequencer@^27.0.5": "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.0.5.tgz#c58b21db49afc36c0e3921d7ddf1fb7954abfded", + "@jest/transform@^27.0.5": "https://registry.yarnpkg.com/@jest/transform/-/transform-27.0.5.tgz#2dcb78953708af713941ac845b06078bc74ed873", "@jest/types@^26.6.2": "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e", - "@nodelib/fs.scandir@2.1.4": "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz#d4b3549a5db5de2683e0c1071ab4f140904bbf69", - "@nodelib/fs.stat@2.0.4": "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz#a3f2dd61bab43b8db8fa108a121cfffe4c676655", - "@nodelib/fs.stat@^2.0.2": "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz#a3f2dd61bab43b8db8fa108a121cfffe4c676655", - "@nodelib/fs.walk@^1.2.3": "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz#cce9396b30aa5afe9e3756608f5831adcb53d063", + "@jest/types@^27.0.2": "https://registry.yarnpkg.com/@jest/types/-/types-27.0.2.tgz#e153d6c46bda0f2589f0702b071f9898c7bbd37e", + "@nodelib/fs.scandir@2.1.5": "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5", + "@nodelib/fs.stat@2.0.5": "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b", + "@nodelib/fs.stat@^2.0.2": "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b", + "@nodelib/fs.walk@^1.2.3": "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.7.tgz#94c23db18ee4653e129abd26fb06f870ac9e1ee2", "@octokit/auth-token@^2.4.4": "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.5.tgz#568ccfb8cb46f36441fac094ce34f7a875b197f3", - "@octokit/core@^3.0.0": "https://registry.yarnpkg.com/@octokit/core/-/core-3.4.0.tgz#b48aa27d755b339fe7550548b340dcc2b513b742", - "@octokit/endpoint@^6.0.1": "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.11.tgz#082adc2aebca6dcefa1fb383f5efb3ed081949d1", - "@octokit/graphql@^4.5.8": "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.6.1.tgz#f975486a46c94b7dbe58a0ca751935edc7e32cc9", - "@octokit/openapi-types@^6.2.1": "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-6.2.1.tgz#5830395622ca0d8e945532c7ace722aec3670508", - "@octokit/openapi-types@^7.0.0": "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-7.0.0.tgz#0f6992db9854af15eca77d71ab0ec7fad2f20411", - "@octokit/plugin-paginate-rest@^2.2.3": "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.13.3.tgz#f0f1792230805108762d87906fb02d573b9e070a", - "@octokit/plugin-rest-endpoint-methods@^4.0.0": "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.15.1.tgz#91a064bee99d0ffcef74a04357e1cf15c27d1cd0", - "@octokit/plugin-rest-endpoint-methods@^5.1.1": "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.1.1.tgz#1720de3511944ebcca5c391ea82605e13e8f95eb", - "@octokit/request-error@^2.0.0": "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.0.5.tgz#72cc91edc870281ad583a42619256b380c600143", - 
"@octokit/request-error@^2.0.5": "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.0.5.tgz#72cc91edc870281ad583a42619256b380c600143", - "@octokit/request@^5.3.0": "https://registry.yarnpkg.com/@octokit/request/-/request-5.4.15.tgz#829da413dc7dd3aa5e2cdbb1c7d0ebe1f146a128", - "@octokit/request@^5.4.12": "https://registry.yarnpkg.com/@octokit/request/-/request-5.4.15.tgz#829da413dc7dd3aa5e2cdbb1c7d0ebe1f146a128", - "@octokit/types@^6.0.3": "https://registry.yarnpkg.com/@octokit/types/-/types-6.14.2.tgz#64c9457f38fb8522bdbba3c8cc814590a2d61bf5", - "@octokit/types@^6.11.0": "https://registry.yarnpkg.com/@octokit/types/-/types-6.14.2.tgz#64c9457f38fb8522bdbba3c8cc814590a2d61bf5", - "@octokit/types@^6.13.0": "https://registry.yarnpkg.com/@octokit/types/-/types-6.14.2.tgz#64c9457f38fb8522bdbba3c8cc814590a2d61bf5", - "@octokit/types@^6.14.1": "https://registry.yarnpkg.com/@octokit/types/-/types-6.14.2.tgz#64c9457f38fb8522bdbba3c8cc814590a2d61bf5", - "@octokit/types@^6.7.1": "https://registry.yarnpkg.com/@octokit/types/-/types-6.14.2.tgz#64c9457f38fb8522bdbba3c8cc814590a2d61bf5", + "@octokit/core@^3.4.0": "https://registry.yarnpkg.com/@octokit/core/-/core-3.5.1.tgz#8601ceeb1ec0e1b1b8217b960a413ed8e947809b", + "@octokit/endpoint@^6.0.1": "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.12.tgz#3b4d47a4b0e79b1027fb8d75d4221928b2d05658", + "@octokit/graphql@^4.5.8": "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.6.4.tgz#0c3f5bed440822182e972317122acb65d311a5ed", + "@octokit/openapi-types@^7.0.0": "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-7.3.2.tgz#065ce49b338043ec7f741316ce06afd4d459d944", + "@octokit/openapi-types@^7.3.2": "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-7.3.2.tgz#065ce49b338043ec7f741316ce06afd4d459d944", + "@octokit/plugin-paginate-rest@^2.13.3": "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.13.5.tgz#e459f9b5dccbe0a53f039a355d5b80c0a2b0dc57", + "@octokit/plugin-rest-endpoint-methods@^5.1.1": "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.3.1.tgz#deddce769b4ec3179170709ab42e4e9e6195aaa9", + "@octokit/plugin-rest-endpoint-methods@^5.3.1": "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.3.1.tgz#deddce769b4ec3179170709ab42e4e9e6195aaa9", + "@octokit/request-error@^2.0.5": "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.1.0.tgz#9e150357831bfc788d13a4fd4b1913d60c74d677", + "@octokit/request-error@^2.1.0": "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.1.0.tgz#9e150357831bfc788d13a4fd4b1913d60c74d677", + "@octokit/request@^5.6.0": "https://registry.yarnpkg.com/@octokit/request/-/request-5.6.0.tgz#6084861b6e4fa21dc40c8e2a739ec5eff597e672", + "@octokit/types@^6.0.3": "https://registry.yarnpkg.com/@octokit/types/-/types-6.16.4.tgz#d24f5e1bacd2fe96d61854b5bda0e88cf8288dfe", + "@octokit/types@^6.13.0": "https://registry.yarnpkg.com/@octokit/types/-/types-6.16.4.tgz#d24f5e1bacd2fe96d61854b5bda0e88cf8288dfe", + "@octokit/types@^6.16.1": "https://registry.yarnpkg.com/@octokit/types/-/types-6.16.4.tgz#d24f5e1bacd2fe96d61854b5bda0e88cf8288dfe", + "@octokit/types@^6.16.2": "https://registry.yarnpkg.com/@octokit/types/-/types-6.16.4.tgz#d24f5e1bacd2fe96d61854b5bda0e88cf8288dfe", "@sinonjs/commons@^1.7.0": "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d", - 
"@sinonjs/fake-timers@^6.0.1": "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40", - "@technote-space/anchor-markdown-header@^1.1.17": "https://registry.yarnpkg.com/@technote-space/anchor-markdown-header/-/anchor-markdown-header-1.1.18.tgz#e3025d0d471f4b28e76e319898be7d9cfdf33012", - "@technote-space/doctoc@^2.4.5": "https://registry.yarnpkg.com/@technote-space/doctoc/-/doctoc-2.4.5.tgz#cd0e9dd6e5b59a93f6ea4871200e4e341ba27b05", - "@technote-space/filter-github-action@^0.5.24": "https://registry.yarnpkg.com/@technote-space/filter-github-action/-/filter-github-action-0.5.24.tgz#0376ac239f290640e51b90624ad1da752f9a551d", - "@technote-space/github-action-helper@^5.2.6": "https://registry.yarnpkg.com/@technote-space/github-action-helper/-/github-action-helper-5.2.7.tgz#8a5672c7e13b879f34af7d5908b0e534495f295e", - "@technote-space/github-action-helper@^5.2.7": "https://registry.yarnpkg.com/@technote-space/github-action-helper/-/github-action-helper-5.2.7.tgz#8a5672c7e13b879f34af7d5908b0e534495f295e", - "@technote-space/github-action-log-helper@^0.1.24": "https://registry.yarnpkg.com/@technote-space/github-action-log-helper/-/github-action-log-helper-0.1.24.tgz#ee62eff52e1500be79e6f63b64f1cb64139a647f", - "@technote-space/github-action-pr-helper@^2.2.6": "https://registry.yarnpkg.com/@technote-space/github-action-pr-helper/-/github-action-pr-helper-2.2.6.tgz#c82543fb4c3f1a8fbf0d8fd549e93871fc6aafca", - "@technote-space/github-action-test-helper@^0.7.10": "https://registry.yarnpkg.com/@technote-space/github-action-test-helper/-/github-action-test-helper-0.7.10.tgz#7aa12704d1286fb0891ea77e2d4905e7522d11e7", + "@sinonjs/fake-timers@^7.0.2": "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-7.1.2.tgz#2524eae70c4910edccf99b2f4e6efc5894aff7b5", + "@technote-space/anchor-markdown-header@^1.1.21": "https://registry.yarnpkg.com/@technote-space/anchor-markdown-header/-/anchor-markdown-header-1.1.21.tgz#703f1190348330f4eaa913549282860e8e74ac67", + "@technote-space/doctoc@^2.4.7": "https://registry.yarnpkg.com/@technote-space/doctoc/-/doctoc-2.4.7.tgz#35b62f4b94933312f1aba0201d5111fd09a6ac6d", + "@technote-space/filter-github-action@^0.5.27": "https://registry.yarnpkg.com/@technote-space/filter-github-action/-/filter-github-action-0.5.28.tgz#e53d92c807ed12208405bd2d3d0facea8e980a7a", + "@technote-space/github-action-helper@^5.2.6": "https://registry.yarnpkg.com/@technote-space/github-action-helper/-/github-action-helper-5.2.9.tgz#a8d53f720cdb482b0f3cff0979b75680ebc13619", + "@technote-space/github-action-helper@^5.2.9": "https://registry.yarnpkg.com/@technote-space/github-action-helper/-/github-action-helper-5.2.9.tgz#a8d53f720cdb482b0f3cff0979b75680ebc13619", + "@technote-space/github-action-log-helper@^0.1.24": "https://registry.yarnpkg.com/@technote-space/github-action-log-helper/-/github-action-log-helper-0.1.28.tgz#98234ad03c2e25212e489064f44479c20494c53a", + "@technote-space/github-action-log-helper@^0.1.25": "https://registry.yarnpkg.com/@technote-space/github-action-log-helper/-/github-action-log-helper-0.1.28.tgz#98234ad03c2e25212e489064f44479c20494c53a", + "@technote-space/github-action-log-helper@^0.1.27": "https://registry.yarnpkg.com/@technote-space/github-action-log-helper/-/github-action-log-helper-0.1.28.tgz#98234ad03c2e25212e489064f44479c20494c53a", + "@technote-space/github-action-log-helper@^0.1.28": 
"https://registry.yarnpkg.com/@technote-space/github-action-log-helper/-/github-action-log-helper-0.1.28.tgz#98234ad03c2e25212e489064f44479c20494c53a", + "@technote-space/github-action-pr-helper@^2.2.7": "https://registry.yarnpkg.com/@technote-space/github-action-pr-helper/-/github-action-pr-helper-2.2.7.tgz#7290e889b6e047f1c1ecd9b7d9666f5d34f195d6", + "@technote-space/github-action-test-helper@^0.7.13": "https://registry.yarnpkg.com/@technote-space/github-action-test-helper/-/github-action-test-helper-0.7.13.tgz#0dad3842276b4de558a5e59922009d85a02001cf", "@technote-space/release-github-actions-cli@^1.8.5": "https://registry.yarnpkg.com/@technote-space/release-github-actions-cli/-/release-github-actions-cli-1.8.5.tgz#ce105bf916e064207126ec55cea639208a8b98e7", - "@technote-space/release-github-actions@^7.0.5": "https://registry.yarnpkg.com/@technote-space/release-github-actions/-/release-github-actions-7.0.5.tgz#f3d49a484aefcfd03f123a8fec787162ede07188", - "@textlint/ast-node-types@^4.4.2": "https://registry.yarnpkg.com/@textlint/ast-node-types/-/ast-node-types-4.4.2.tgz#d3fda5c9086baba04bc75440039ccf18415c8446", - "@textlint/markdown-to-ast@^6.3.4": "https://registry.yarnpkg.com/@textlint/markdown-to-ast/-/markdown-to-ast-6.3.4.tgz#392c14d1f8b0643ada220d99fc0ee9f1e375fa27", + "@technote-space/release-github-actions@^7.0.5": "https://registry.yarnpkg.com/@technote-space/release-github-actions/-/release-github-actions-7.0.6.tgz#7c65e40e0ed2c41ec6a19816310ffece3f27dd1b", + "@textlint/ast-node-types@^12.0.0": "https://registry.yarnpkg.com/@textlint/ast-node-types/-/ast-node-types-12.0.0.tgz#23bd683f9fc04209ae28bff72954c8aa67c6b1ca", + "@textlint/markdown-to-ast@^12.0.0": "https://registry.yarnpkg.com/@textlint/markdown-to-ast/-/markdown-to-ast-12.0.0.tgz#bdd2f572e12be04b153789413a31fcf78d1f98d2", + "@tootallnate/once@1": "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82", "@types/babel__core@^7.0.0": "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.14.tgz#faaeefc4185ec71c389f4501ee5ec84b170cc402", - "@types/babel__core@^7.1.7": "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.14.tgz#faaeefc4185ec71c389f4501ee5ec84b170cc402", + "@types/babel__core@^7.1.14": "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.14.tgz#faaeefc4185ec71c389f4501ee5ec84b170cc402", "@types/babel__generator@*": "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.2.tgz#f3d71178e187858f7c45e30380f8f1b7415a12d8", "@types/babel__template@*": "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.0.tgz#0c888dd70b3ee9eebb6e4f200e809da0076262be", "@types/babel__traverse@*": "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.11.1.tgz#654f6c4f67568e24c23b367e947098c6206fa639", @@ -170,25 +178,30 @@ "@types/istanbul-lib-coverage@^2.0.0": "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762", "@types/istanbul-lib-coverage@^2.0.1": "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762", "@types/istanbul-lib-report@*": "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686", - "@types/istanbul-reports@^3.0.0": 
"https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz#508b13aa344fa4976234e75dddcc34925737d821", + "@types/istanbul-reports@^3.0.0": "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff", "@types/jest@^26.0.23": "https://registry.yarnpkg.com/@types/jest/-/jest-26.0.23.tgz#a1b7eab3c503b80451d019efb588ec63522ee4e7", - "@types/json-schema@^7.0.3": "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad", + "@types/json-schema@^7.0.7": "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad", + "@types/mdast@^3.0.0": "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.3.tgz#2d7d671b1cd1ea3deb306ea75036c2a0407d2deb", "@types/minimist@^1.2.0": "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.1.tgz#283f669ff76d7b8260df8ab7a4262cc83d988256", - "@types/node@*": "https://registry.yarnpkg.com/@types/node/-/node-15.0.2.tgz#51e9c0920d1b45936ea04341aa3e2e58d339fb67", - "@types/node@^15.0.2": "https://registry.yarnpkg.com/@types/node/-/node-15.0.2.tgz#51e9c0920d1b45936ea04341aa3e2e58d339fb67", + "@types/node@*": "https://registry.yarnpkg.com/@types/node/-/node-15.12.4.tgz#e1cf817d70a1e118e81922c4ff6683ce9d422e26", + "@types/node@^15.12.4": "https://registry.yarnpkg.com/@types/node/-/node-15.12.4.tgz#e1cf817d70a1e118e81922c4ff6683ce9d422e26", "@types/normalize-package-data@^2.4.0": "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e", "@types/parse-json@^4.0.0": "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0", - "@types/prettier@^2.0.0": "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.3.tgz#ef65165aea2924c9359205bf748865b8881753c0", + "@types/prettier@^2.1.5": "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.3.0.tgz#2e8332cc7363f887d32ec5496b207d26ba8052bb", "@types/stack-utils@^2.0.0": "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff", + "@types/unist@*": "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e", + "@types/unist@^2.0.0": "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e", + "@types/unist@^2.0.2": "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e", "@types/yargs-parser@*": "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-20.2.0.tgz#dd3e6699ba3237f0348cd085e4698780204842f9", "@types/yargs@^15.0.0": "https://registry.yarnpkg.com/@types/yargs/-/yargs-15.0.13.tgz#34f7fec8b389d7f3c1fd08026a5763e072d3c6dc", - "@typescript-eslint/eslint-plugin@^4.22.1": "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.22.1.tgz#6bcdbaa4548553ab861b4e5f34936ead1349a543", - "@typescript-eslint/experimental-utils@4.22.1": "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.22.1.tgz#3938a5c89b27dc9a39b5de63a62ab1623ab27497", - "@typescript-eslint/parser@^4.22.1": "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.22.1.tgz#a95bda0fd01d994a15fc3e99dc984294f25c19cc", - "@typescript-eslint/scope-manager@4.22.1": 
"https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.22.1.tgz#5bb357f94f9cd8b94e6be43dd637eb73b8f355b4", - "@typescript-eslint/types@4.22.1": "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.22.1.tgz#bf99c6cec0b4a23d53a61894816927f2adad856a", - "@typescript-eslint/typescript-estree@4.22.1": "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.22.1.tgz#dca379eead8cdfd4edc04805e83af6d148c164f9", - "@typescript-eslint/visitor-keys@4.22.1": "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.22.1.tgz#6045ae25a11662c671f90b3a403d682dfca0b7a6", + "@types/yargs@^16.0.0": "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.3.tgz#4b6d35bb8e680510a7dc2308518a80ee1ef27e01", + "@typescript-eslint/eslint-plugin@^4.28.0": "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.28.0.tgz#1a66f03b264844387beb7dc85e1f1d403bd1803f", + "@typescript-eslint/experimental-utils@4.28.0": "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.28.0.tgz#13167ed991320684bdc23588135ae62115b30ee0", + "@typescript-eslint/parser@^4.28.0": "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.28.0.tgz#2404c16751a28616ef3abab77c8e51d680a12caa", + "@typescript-eslint/scope-manager@4.28.0": "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.28.0.tgz#6a3009d2ab64a30fc8a1e257a1a320067f36a0ce", + "@typescript-eslint/types@4.28.0": "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.28.0.tgz#a33504e1ce7ac51fc39035f5fe6f15079d4dafb0", + "@typescript-eslint/typescript-estree@4.28.0": "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.28.0.tgz#e66d4e5aa2ede66fec8af434898fe61af10c71cf", + "@typescript-eslint/visitor-keys@4.28.0": "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.28.0.tgz#255c67c966ec294104169a6939d96f91c8a89434", "JSONStream@^1.0.4": "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0", "abab@^2.0.3": "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a", "abab@^2.0.5": "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a", @@ -197,12 +210,12 @@ "acorn-walk@^7.1.1": "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc", "acorn@^7.1.1": "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa", "acorn@^7.4.0": "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa", - "acorn@^8.1.0": "https://registry.yarnpkg.com/acorn/-/acorn-8.2.4.tgz#caba24b08185c3b56e3168e97d15ed17f4d31fd0", + "acorn@^8.2.4": "https://registry.yarnpkg.com/acorn/-/acorn-8.4.0.tgz#af53266e698d7cffa416714b503066a82221be60", + "agent-base@6": "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77", "aggregate-error@^3.0.0": "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a", "ajv@^6.10.0": "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4", - "ajv@^6.12.3": "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4", "ajv@^6.12.4": 
"https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4", - "ajv@^8.0.1": "https://registry.yarnpkg.com/ajv/-/ajv-8.2.0.tgz#c89d3380a784ce81b2085f48811c4c101df4c602", + "ajv@^8.0.1": "https://registry.yarnpkg.com/ajv/-/ajv-8.6.0.tgz#60cc45d9c46a477d80d92c48076d972c342e5720", "ansi-colors@^4.1.1": "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348", "ansi-escapes@^4.2.1": "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e", "ansi-escapes@^4.3.0": "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e", @@ -210,56 +223,38 @@ "ansi-styles@^3.2.1": "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d", "ansi-styles@^4.0.0": "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937", "ansi-styles@^4.1.0": "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937", - "anymatch@^2.0.0": "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb", + "ansi-styles@^5.0.0": "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b", "anymatch@^3.0.3": "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716", "argparse@^1.0.7": "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911", "argparse@^2.0.1": "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38", - "arr-diff@^4.0.0": "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520", - "arr-flatten@^1.1.0": "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1", - "arr-union@^3.1.0": "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4", "array-ify@^1.0.0": "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece", "array-union@^2.1.0": "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d", - "array-unique@^0.3.2": "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428", "arrify@^1.0.1": "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d", - "asn1@~0.2.3": "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136", - "assert-plus@1.0.0": "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525", - "assert-plus@^1.0.0": "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525", - "assign-symbols@^1.0.0": "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367", "astral-regex@^2.0.0": "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31", "asynckit@^0.4.0": "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79", "at-least-node@^1.0.0": "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2", 
- "atob@^2.1.2": "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9", - "aws-sign2@~0.7.0": "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8", - "aws4@^1.8.0": "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59", - "babel-jest@^26.6.3": "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056", + "babel-jest@^27.0.5": "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.0.5.tgz#cd34c033ada05d1362211e5152391fd7a88080c8", "babel-plugin-istanbul@^6.0.0": "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.0.0.tgz#e159ccdc9af95e0b570c75b4573b7c34d671d765", - "babel-plugin-jest-hoist@^26.6.2": "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-26.6.2.tgz#8185bd030348d254c6d7dd974355e6a28b21e62d", + "babel-plugin-jest-hoist@^27.0.1": "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.0.1.tgz#a6d10e484c93abff0f4e95f437dad26e5736ea11", "babel-preset-current-node-syntax@^1.0.0": "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b", - "babel-preset-jest@^26.6.2": "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-26.6.2.tgz#747872b1171df032252426586881d62d31798fee", + "babel-preset-jest@^27.0.1": "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.0.1.tgz#7a50c75d16647c23a2cf5158d5bb9eb206b10e20", "bail@^1.0.0": "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776", "balanced-match@^1.0.0": "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee", - "base@^0.11.1": "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f", - "bcrypt-pbkdf@^1.0.0": "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e", - "before-after-hook@^2.2.0": "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.1.tgz#73540563558687586b52ed217dad6a802ab1549c", - "boundary@^1.0.1": "https://registry.yarnpkg.com/boundary/-/boundary-1.0.1.tgz#4d67dc2602c0cc16dd9bce7ebf87e948290f5812", + "before-after-hook@^2.2.0": "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.2.tgz#a6e8ca41028d90ee2c24222f201c90956091613e", "brace-expansion@^1.1.7": "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd", - "braces@^2.3.1": "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729", "braces@^3.0.1": "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107", "browser-process-hrtime@^1.0.0": "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626", - "browserslist@^4.14.5": "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2", + "browserslist@^4.16.6": "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2", "bs-logger@0.x": "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8", "bser@2.1.1": 
"https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05", "buffer-from@1.x": "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef", "buffer-from@^1.0.0": "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef", - "cache-base@^1.0.1": "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2", "callsites@^3.0.0": "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73", "camelcase-keys@^6.2.2": "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0", - "camelcase@^5.0.0": "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320", "camelcase@^5.3.1": "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320", - "camelcase@^6.0.0": "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809", - "caniuse-lite@^1.0.30001219": "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001223.tgz#39b49ff0bfb3ee3587000d2f66c47addc6e14443", - "capture-exit@^2.0.0": "https://registry.yarnpkg.com/capture-exit/-/capture-exit-2.0.0.tgz#fb953bfaebeb781f62898239dabb426d08a509a4", - "caseless@~0.12.0": "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc", + "camelcase@^6.2.0": "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809", + "caniuse-lite@^1.0.30001219": "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001239.tgz#66e8669985bb2cb84ccb10f68c25ce6dd3e4d2b8", + "ccount@^1.0.0": "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043", "chalk@^2.0.0": "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424", "chalk@^4.0.0": "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad", "chalk@^4.1.0": "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad", @@ -268,88 +263,71 @@ "character-entities-legacy@^1.0.0": "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz#94bc1845dce70a5bb9d2ecc748725661293d8fc1", "character-entities@^1.0.0": "https://registry.yarnpkg.com/character-entities/-/character-entities-1.2.4.tgz#e12c3939b7eaf4e5b15e7ad4c5e28e1d48c5b16b", "character-reference-invalid@^1.0.0": "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz#083329cda0eae272ab3dbbf37e9a382c13af1560", - "ci-info@^2.0.0": "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46", - "cjs-module-lexer@^0.6.0": "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-0.6.0.tgz#4186fcca0eae175970aee870b9fe2d6cf8d5655f", - "class-utils@^0.3.5": "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463", + "ci-info@^3.1.1": "https://registry.yarnpkg.com/ci-info/-/ci-info-3.2.0.tgz#2876cb948a498797b5236f0095bc057d0dca38b6", + "cjs-module-lexer@^1.0.0": "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.1.tgz#2fd46d9906a126965aa541345c499aaa18e8cd73", "clean-stack@^2.0.0": 
"https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b", "cli-cursor@^3.1.0": "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307", "cli-truncate@^2.1.0": "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7", - "cliui@^6.0.0": "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1", "cliui@^7.0.2": "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f", "co@^4.6.0": "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184", - "collapse-white-space@^1.0.2": "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287", "collect-v8-coverage@^1.0.0": "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59", - "collection-visit@^1.0.0": "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0", "color-convert@^1.9.0": "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8", "color-convert@^2.0.1": "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3", "color-name@1.1.3": "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25", "color-name@~1.1.4": "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2", "colorette@^1.2.2": "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94", - "combined-stream@^1.0.6": "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f", - "combined-stream@~1.0.6": "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f", + "combined-stream@^1.0.8": "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f", "commander@^7.2.0": "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7", "compare-func@^2.0.0": "https://registry.yarnpkg.com/compare-func/-/compare-func-2.0.0.tgz#fb65e75edbddfd2e568554e8b5b05fff7a51fcb3", - "component-emitter@^1.2.1": "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0", "concat-map@0.0.1": "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b", "conventional-changelog-angular@^5.0.11": "https://registry.yarnpkg.com/conventional-changelog-angular/-/conventional-changelog-angular-5.0.12.tgz#c979b8b921cbfe26402eb3da5bbfda02d865a2b9", "conventional-changelog-conventionalcommits@^4.3.1": "https://registry.yarnpkg.com/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-4.6.0.tgz#7fc17211dbca160acf24687bd2fdd5fd767750eb", "conventional-commits-parser@^3.0.0": "https://registry.yarnpkg.com/conventional-commits-parser/-/conventional-commits-parser-3.2.1.tgz#ba44f0b3b6588da2ee9fd8da508ebff50d116ce2", - "convert-source-map@^1.4.0": "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442", - 
"convert-source-map@^1.6.0": "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442", - "convert-source-map@^1.7.0": "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442", - "copy-descriptor@^0.1.0": "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d", - "core-util-is@1.0.2": "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7", + "convert-source-map@^1.4.0": "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369", + "convert-source-map@^1.6.0": "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369", + "convert-source-map@^1.7.0": "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369", "cosmiconfig@^7.0.0": "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3", - "cross-spawn@^6.0.0": "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4", - "cross-spawn@^7.0.0": "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6", "cross-spawn@^7.0.2": "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6", "cross-spawn@^7.0.3": "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6", "cssom@^0.4.4": "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10", "cssom@~0.3.6": "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a", "cssstyle@^2.3.0": "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852", "dargs@^7.0.0": "https://registry.yarnpkg.com/dargs/-/dargs-7.0.0.tgz#04015c41de0bcb69ec84050f3d9be0caf8d6d5cc", - "dashdash@^1.12.0": "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0", "data-urls@^2.0.0": "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b", - "debug@^2.2.0": "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f", - "debug@^2.3.3": "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f", + "debug@4": "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee", + "debug@^4.0.0": "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee", "debug@^4.0.1": "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee", "debug@^4.1.0": "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee", "debug@^4.1.1": "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee", "debug@^4.3.1": "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee", "decamelize-keys@^1.1.0": "https://registry.yarnpkg.com/decamelize-keys/-/decamelize-keys-1.1.0.tgz#d171a87933252807eb3cb61dc1c1445d078df2d9", "decamelize@^1.1.0": 
"https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290", - "decamelize@^1.2.0": "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290", "decimal.js@^10.2.1": "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.2.1.tgz#238ae7b0f0c793d3e3cea410108b35a2c01426a3", - "decode-uri-component@^0.2.0": "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545", "dedent@^0.7.0": "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c", "deep-is@^0.1.3": "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34", "deep-is@~0.1.3": "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34", "deepmerge@^4.2.2": "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955", - "define-property@^0.2.5": "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116", - "define-property@^1.0.0": "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6", - "define-property@^2.0.2": "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d", "delayed-stream@~1.0.0": "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619", "deprecation@^2.0.0": "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919", "deprecation@^2.3.1": "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919", "detect-newline@^3.0.0": "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651", "diff-sequences@^26.6.2": "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1", + "diff-sequences@^27.0.1": "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.0.1.tgz#9c9801d52ed5f576ff0a20e3022a13ee6e297e7c", "dir-glob@^3.0.1": "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f", "doctrine@^3.0.0": "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961", - "dom-serializer@^1.0.1": "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.3.1.tgz#d845a1565d7c041a95e5dab62184ab41e3a519be", + "dom-serializer@^1.0.1": "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.3.2.tgz#6206437d32ceefaec7161803230c7a20bc1b4d91", "domelementtype@^2.0.1": "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.2.0.tgz#9a0b6c2782ed6a1c7323d42267183df9bd8b1d57", "domelementtype@^2.2.0": "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.2.0.tgz#9a0b6c2782ed6a1c7323d42267183df9bd8b1d57", "domexception@^2.0.1": "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304", "domhandler@^4.0.0": "https://registry.yarnpkg.com/domhandler/-/domhandler-4.2.0.tgz#f9768a5f034be60a89a27c2e4d0f74eba0d8b059", "domhandler@^4.2.0": "https://registry.yarnpkg.com/domhandler/-/domhandler-4.2.0.tgz#f9768a5f034be60a89a27c2e4d0f74eba0d8b059", - "domutils@^2.5.2": 
"https://registry.yarnpkg.com/domutils/-/domutils-2.6.0.tgz#2e15c04185d43fb16ae7057cb76433c6edb938b7", + "domutils@^2.5.2": "https://registry.yarnpkg.com/domutils/-/domutils-2.7.0.tgz#8ebaf0c41ebafcf55b0b72ec31c56323712c5442", "dot-prop@^5.1.0": "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88", "dotenv@^8.2.0": "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b", - "ecc-jsbn@~0.1.1": "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9", - "electron-to-chromium@^1.3.723": "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.727.tgz#857e310ca00f0b75da4e1db6ff0e073cc4a91ddf", - "emittery@^0.7.1": "https://registry.yarnpkg.com/emittery/-/emittery-0.7.2.tgz#25595908e13af0f5674ab419396e2fb394cdfa82", + "electron-to-chromium@^1.3.723": "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.754.tgz#afbe69177ad7aae968c3bbeba129dc70dcc37cf4", + "emittery@^0.8.1": "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860", "emoji-regex@^8.0.0": "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37", "emoji-regex@^9.2.2": "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72", - "end-of-stream@^1.1.0": "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0", "enquirer@^2.3.5": "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d", "enquirer@^2.3.6": "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d", "entities@^2.0.0": "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55", @@ -357,15 +335,15 @@ "escalade@^3.1.1": "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40", "escape-string-regexp@^1.0.5": "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4", "escape-string-regexp@^2.0.0": "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344", + "escape-string-regexp@^4.0.0": "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34", "escodegen@^2.0.0": "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd", - "eslint-scope@^5.0.0": "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c", "eslint-scope@^5.1.1": "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c", - "eslint-utils@^2.0.0": "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27", "eslint-utils@^2.1.0": "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27", + "eslint-utils@^3.0.0": "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672", "eslint-visitor-keys@^1.1.0": "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e", "eslint-visitor-keys@^1.3.0": 
"https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e", "eslint-visitor-keys@^2.0.0": "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303", - "eslint@^7.25.0": "https://registry.yarnpkg.com/eslint/-/eslint-7.25.0.tgz#1309e4404d94e676e3e831b3a3ad2b050031eb67", + "eslint@^7.29.0": "https://registry.yarnpkg.com/eslint/-/eslint-7.29.0.tgz#ee2a7648f2e729485e4d0bd6383ec1deabc8b3c0", "espree@^7.3.0": "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6", "espree@^7.3.1": "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6", "esprima@^4.0.0": "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71", @@ -376,22 +354,12 @@ "estraverse@^5.1.0": "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880", "estraverse@^5.2.0": "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880", "esutils@^2.0.2": "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64", - "exec-sh@^0.3.2": "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.3.6.tgz#ff264f9e325519a60cb5e273692943483cca63bc", - "execa@^1.0.0": "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8", - "execa@^4.0.0": "https://registry.yarnpkg.com/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a", - "execa@^5.0.0": "https://registry.yarnpkg.com/execa/-/execa-5.0.0.tgz#4029b0007998a841fbd1032e5f4de86a3c1e3376", + "execa@^5.0.0": "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd", "exit@^0.1.2": "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c", - "expand-brackets@^2.1.4": "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622", - "expect@^26.6.2": "https://registry.yarnpkg.com/expect/-/expect-26.6.2.tgz#c6b996bf26bf3fe18b67b2d0f51fc981ba934417", - "extend-shallow@^2.0.1": "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f", - "extend-shallow@^3.0.0": "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8", - "extend-shallow@^3.0.2": "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8", + "expect@^27.0.2": "https://registry.yarnpkg.com/expect/-/expect-27.0.2.tgz#e66ca3a4c9592f1c019fa1d46459a9d2084f3422", "extend@^3.0.0": "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa", - "extend@~3.0.2": "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa", - "extglob@^2.0.4": "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543", - "extsprintf@1.3.0": "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05", - "extsprintf@^1.2.0": "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f", "fast-deep-equal@^3.1.1": "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525", + "fast-deep-equal@^3.1.3": 
"https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525", "fast-glob@^3.1.1": "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661", "fast-glob@^3.2.5": "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661", "fast-json-stable-stringify@2.x": "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633", @@ -399,73 +367,54 @@ "fast-levenshtein@^2.0.6": "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917", "fast-levenshtein@~2.0.6": "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917", "fastq@^1.6.0": "https://registry.yarnpkg.com/fastq/-/fastq-1.11.0.tgz#bb9fb955a07130a918eb63c1f5161cc32a5d0858", - "fault@^1.0.1": "https://registry.yarnpkg.com/fault/-/fault-1.0.4.tgz#eafcfc0a6d214fc94601e170df29954a4f842f13", + "fault@^1.0.0": "https://registry.yarnpkg.com/fault/-/fault-1.0.4.tgz#eafcfc0a6d214fc94601e170df29954a4f842f13", "fb-watchman@^2.0.0": "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85", - "figures@^3.2.0": "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af", "file-entry-cache@^6.0.1": "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027", - "fill-range@^4.0.0": "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7", "fill-range@^7.0.1": "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40", "find-up@^4.0.0": "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19", "find-up@^4.1.0": "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19", "find-up@^5.0.0": "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc", "flat-cache@^3.0.4": "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11", "flatted@^3.1.0": "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469", - "for-in@^1.0.2": "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80", - "forever-agent@~0.6.1": "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91", - "form-data@~2.3.2": "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6", + "form-data@^3.0.0": "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f", "format@^0.2.0": "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b", - "fragment-cache@^0.2.1": "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19", "fromentries@^1.3.2": "https://registry.yarnpkg.com/fromentries/-/fromentries-1.3.2.tgz#e4bca6808816bf8f93b52750f1127f5a6fd86e3a", "fs-extra@^9.0.0": "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d", "fs.realpath@^1.0.0": 
"https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f", - "fsevents@^2.1.2": "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a", + "fsevents@^2.3.2": "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a", "function-bind@^1.1.1": "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d", "functional-red-black-tree@^1.0.1": "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327", "gensync@^1.0.0-beta.2": "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0", - "get-caller-file@^2.0.1": "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e", "get-caller-file@^2.0.5": "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e", "get-own-enumerable-property-symbols@^3.0.0": "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664", "get-package-type@^0.1.0": "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a", - "get-stdin@8.0.0": "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53", - "get-stream@^4.0.0": "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5", - "get-stream@^5.0.0": "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3", "get-stream@^6.0.0": "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7", - "get-value@^2.0.3": "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28", - "get-value@^2.0.6": "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28", - "getpass@^0.1.1": "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa", "git-raw-commits@^2.0.0": "https://registry.yarnpkg.com/git-raw-commits/-/git-raw-commits-2.0.10.tgz#e2255ed9563b1c9c3ea6bd05806410290297bbc1", - "glob-parent@^5.0.0": "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4", "glob-parent@^5.1.0": "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4", + "glob-parent@^5.1.2": "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4", "glob@^7.1.1": "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90", "glob@^7.1.2": "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90", "glob@^7.1.3": "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90", "glob@^7.1.4": "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90", "global-dirs@^0.1.1": "https://registry.yarnpkg.com/global-dirs/-/global-dirs-0.1.1.tgz#b319c0dd4607f353f3be9cca4c72fc148c49f445", "globals@^11.1.0": 
"https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e", - "globals@^12.1.0": "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8", - "globals@^13.6.0": "https://registry.yarnpkg.com/globals/-/globals-13.8.0.tgz#3e20f504810ce87a8d72e55aecf8435b50f4c1b3", - "globby@^11.0.1": "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb", + "globals@^13.6.0": "https://registry.yarnpkg.com/globals/-/globals-13.9.0.tgz#4bf2bf635b334a173fb1daf7c5e6b218ecdc06cb", + "globals@^13.9.0": "https://registry.yarnpkg.com/globals/-/globals-13.9.0.tgz#4bf2bf635b334a173fb1daf7c5e6b218ecdc06cb", + "globby@^11.0.3": "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5", "graceful-fs@^4.1.6": "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee", "graceful-fs@^4.2.0": "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee", "graceful-fs@^4.2.4": "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee", - "growly@^1.3.0": "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081", - "har-schema@^2.0.0": "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92", - "har-validator@~5.1.3": "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd", "hard-rejection@^2.1.0": "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883", "has-flag@^3.0.0": "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd", "has-flag@^4.0.0": "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b", - "has-value@^0.3.1": "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f", - "has-value@^1.0.0": "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177", - "has-values@^0.1.4": "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771", - "has-values@^1.0.0": "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f", "has@^1.0.3": "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796", "hosted-git-info@^2.1.4": "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9", "hosted-git-info@^4.0.1": "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.0.2.tgz#5e425507eede4fea846b7262f0838456c4209961", "html-encoding-sniffer@^2.0.1": "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3", "html-escaper@^2.0.0": "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453", "htmlparser2@^6.1.0": "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7", - "http-signature@~1.2.0": "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1", - "human-signals@^1.1.1": 
"https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3", + "http-proxy-agent@^4.0.1": "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a", + "https-proxy-agent@^5.0.0": "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2", "human-signals@^2.1.0": "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0", "husky@^6.0.0": "https://registry.yarnpkg.com/husky/-/husky-6.0.0.tgz#810f11869adf51604c32ea577edbc377d7f9319e", "iconv-lite@0.4.24": "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b", @@ -478,122 +427,86 @@ "indent-string@^4.0.0": "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251", "inflight@^1.0.4": "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9", "inherits@2": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c", - "inherits@^2.0.0": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c", "inherits@^2.0.3": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c", "ini@^1.3.4": "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c", - "is-accessor-descriptor@^0.1.6": "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6", - "is-accessor-descriptor@^1.0.0": "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656", "is-alphabetical@^1.0.0": "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d", "is-alphanumerical@^1.0.0": "https://registry.yarnpkg.com/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz#7eb9a2431f855f6b1ef1a78e326df515696c4dbf", "is-arrayish@^0.2.1": "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d", - "is-buffer@^1.1.4": "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be", - "is-buffer@^1.1.5": "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be", - "is-ci@^2.0.0": "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c", - "is-core-module@^2.2.0": "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.3.0.tgz#d341652e3408bca69c4671b79a0954a3d349f887", - "is-data-descriptor@^0.1.4": "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56", - "is-data-descriptor@^1.0.0": "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7", + "is-buffer@^2.0.0": "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191", + "is-ci@^3.0.0": "https://registry.yarnpkg.com/is-ci/-/is-ci-3.0.0.tgz#c7e7be3c9d8eef7d0fa144390bd1e4b88dc4c994", + "is-core-module@^2.2.0": "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1", "is-decimal@^1.0.0": 
"https://registry.yarnpkg.com/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5", - "is-descriptor@^0.1.0": "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca", - "is-descriptor@^1.0.0": "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec", - "is-descriptor@^1.0.2": "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec", - "is-docker@^2.0.0": "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa", - "is-extendable@^0.1.0": "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89", - "is-extendable@^0.1.1": "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89", - "is-extendable@^1.0.1": "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4", "is-extglob@^2.1.1": "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2", "is-fullwidth-code-point@^3.0.0": "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d", "is-generator-fn@^2.0.0": "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118", "is-glob@^4.0.0": "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc", "is-glob@^4.0.1": "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc", "is-hexadecimal@^1.0.0": "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz#cc35c97588da4bd49a8eedd6bc4082d44dcb23a7", - "is-number@^3.0.0": "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195", "is-number@^7.0.0": "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b", "is-obj@^1.0.1": "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f", "is-obj@^2.0.0": "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982", "is-plain-obj@^1.1.0": "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e", - "is-plain-object@^2.0.3": "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677", - "is-plain-object@^2.0.4": "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677", + "is-plain-obj@^2.0.0": "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287", "is-plain-object@^5.0.0": "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344", - "is-potential-custom-element-name@^1.0.0": "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5", + "is-potential-custom-element-name@^1.0.1": "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5", "is-regexp@^1.0.0": 
"https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069", - "is-stream@^1.1.0": "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44", "is-stream@^2.0.0": "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3", "is-text-path@^1.0.1": "https://registry.yarnpkg.com/is-text-path/-/is-text-path-1.0.1.tgz#4e1aa0fb51bfbcb3e92688001397202c1775b66e", "is-typedarray@^1.0.0": "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a", - "is-typedarray@~1.0.0": "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a", "is-unicode-supported@^0.1.0": "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7", - "is-whitespace-character@^1.0.0": "https://registry.yarnpkg.com/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz#0858edd94a95594c7c9dd0b5c174ec6e45ee4aa7", - "is-windows@^1.0.2": "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d", - "is-word-character@^1.0.0": "https://registry.yarnpkg.com/is-word-character/-/is-word-character-1.0.4.tgz#ce0e73216f98599060592f62ff31354ddbeb0230", - "is-wsl@^2.2.0": "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271", - "isarray@1.0.0": "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11", "isexe@^2.0.0": "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10", - "isobject@^2.0.0": "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89", - "isobject@^3.0.0": "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df", - "isobject@^3.0.1": "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df", - "isstream@~0.1.2": "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a", "istanbul-lib-coverage@^3.0.0": "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec", "istanbul-lib-instrument@^4.0.0": "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d", "istanbul-lib-instrument@^4.0.3": "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d", "istanbul-lib-report@^3.0.0": "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6", "istanbul-lib-source-maps@^4.0.0": "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9", "istanbul-reports@^3.0.2": "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b", - "jest-changed-files@^26.6.2": "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-26.6.2.tgz#f6198479e1cc66f22f9ae1e22acaa0b429c042d0", - "jest-circus@^26.6.3": "https://registry.yarnpkg.com/jest-circus/-/jest-circus-26.6.3.tgz#3cc7ef2a6a3787e5d7bfbe2c72d83262154053e7", - "jest-cli@^26.6.3": 
"https://registry.yarnpkg.com/jest-cli/-/jest-cli-26.6.3.tgz#43117cfef24bc4cd691a174a8796a532e135e92a", - "jest-config@^26.6.3": "https://registry.yarnpkg.com/jest-config/-/jest-config-26.6.3.tgz#64f41444eef9eb03dc51d5c53b75c8c71f645349", + "jest-changed-files@^27.0.2": "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.0.2.tgz#997253042b4a032950fc5f56abf3c5d1f8560801", + "jest-circus@^27.0.5": "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.0.5.tgz#b5e327f1d6857c8485126f8e364aefa4378debaa", + "jest-cli@^27.0.5": "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.0.5.tgz#f359ba042624cffb96b713010a94bffb7498a37c", + "jest-config@^27.0.5": "https://registry.yarnpkg.com/jest-config/-/jest-config-27.0.5.tgz#683da3b0d8237675c29c817f6e3aba1481028e19", "jest-diff@^26.0.0": "https://registry.yarnpkg.com/jest-diff/-/jest-diff-26.6.2.tgz#1aa7468b52c3a68d7d5c5fdcdfcd5e49bd164394", - "jest-diff@^26.6.2": "https://registry.yarnpkg.com/jest-diff/-/jest-diff-26.6.2.tgz#1aa7468b52c3a68d7d5c5fdcdfcd5e49bd164394", - "jest-docblock@^26.0.0": "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-26.0.0.tgz#3e2fa20899fc928cb13bd0ff68bd3711a36889b5", - "jest-each@^26.6.2": "https://registry.yarnpkg.com/jest-each/-/jest-each-26.6.2.tgz#02526438a77a67401c8a6382dfe5999952c167cb", - "jest-environment-jsdom@^26.6.2": "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-26.6.2.tgz#78d09fe9cf019a357009b9b7e1f101d23bd1da3e", - "jest-environment-node@^26.6.2": "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-26.6.2.tgz#824e4c7fb4944646356f11ac75b229b0035f2b0c", + "jest-diff@^27.0.2": "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.0.2.tgz#f315b87cee5dc134cf42c2708ab27375cc3f5a7e", + "jest-docblock@^27.0.1": "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.0.1.tgz#bd9752819b49fa4fab1a50b73eb58c653b962e8b", + "jest-each@^27.0.2": "https://registry.yarnpkg.com/jest-each/-/jest-each-27.0.2.tgz#865ddb4367476ced752167926b656fa0dcecd8c7", + "jest-environment-jsdom@^27.0.5": "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.0.5.tgz#c36771977cf4490a9216a70473b39161d193c212", + "jest-environment-node@^27.0.5": "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.0.5.tgz#b7238fc2b61ef2fb9563a3b7653a95fa009a6a54", "jest-get-type@^26.3.0": "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0", - "jest-haste-map@^26.6.2": "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-26.6.2.tgz#dd7e60fe7dc0e9f911a23d79c5ff7fb5c2cafeaa", - "jest-jasmine2@^26.6.3": "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-26.6.3.tgz#adc3cf915deacb5212c93b9f3547cd12958f2edd", - "jest-leak-detector@^26.6.2": "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-26.6.2.tgz#7717cf118b92238f2eba65054c8a0c9c653a91af", - "jest-matcher-utils@^26.6.2": "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-26.6.2.tgz#8e6fd6e863c8b2d31ac6472eeb237bc595e53e7a", - "jest-message-util@^26.6.2": "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-26.6.2.tgz#58173744ad6fc0506b5d21150b9be56ef001ca07", - "jest-mock@^26.6.2": "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302", + "jest-get-type@^27.0.1": "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.0.1.tgz#34951e2b08c8801eb28559d7eb732b04bbcf7815", + 
"jest-haste-map@^27.0.5": "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.0.5.tgz#2e1e55073b5328410a2c0d74b334e513d71f3470", + "jest-jasmine2@^27.0.5": "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.0.5.tgz#8a6eb2a685cdec3af13881145c77553e4e197776", + "jest-leak-detector@^27.0.2": "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.0.2.tgz#ce19aa9dbcf7a72a9d58907a970427506f624e69", + "jest-matcher-utils@^27.0.2": "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.0.2.tgz#f14c060605a95a466cdc759acc546c6f4cbfc4f0", + "jest-message-util@^27.0.2": "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.0.2.tgz#181c9b67dff504d8f4ad15cba10d8b80f272048c", + "jest-mock@^27.0.3": "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.0.3.tgz#5591844f9192b3335c0dca38e8e45ed297d4d23d", "jest-pnp-resolver@^1.2.2": "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c", - "jest-regex-util@^26.0.0": "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-26.0.0.tgz#d25e7184b36e39fd466c3bc41be0971e821fee28", - "jest-resolve-dependencies@^26.6.3": "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-26.6.3.tgz#6680859ee5d22ee5dcd961fe4871f59f4c784fb6", - "jest-resolve@^26.6.2": "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-26.6.2.tgz#a3ab1517217f469b504f1b56603c5bb541fbb507", - "jest-runner@^26.6.3": "https://registry.yarnpkg.com/jest-runner/-/jest-runner-26.6.3.tgz#2d1fed3d46e10f233fd1dbd3bfaa3fe8924be159", - "jest-runtime@^26.6.3": "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-26.6.3.tgz#4f64efbcfac398331b74b4b3c82d27d401b8fa2b", - "jest-serializer@^26.6.2": "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-26.6.2.tgz#d139aafd46957d3a448f3a6cdabe2919ba0742d1", - "jest-snapshot@^26.6.2": "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-26.6.2.tgz#f3b0af1acb223316850bd14e1beea9837fb39c84", - "jest-util@^26.1.0": "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1", - "jest-util@^26.6.2": "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1", - "jest-validate@^26.6.2": "https://registry.yarnpkg.com/jest-validate/-/jest-validate-26.6.2.tgz#23d380971587150467342911c3d7b4ac57ab20ec", - "jest-watcher@^26.6.2": "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-26.6.2.tgz#a5b683b8f9d68dbcb1d7dae32172d2cca0592975", - "jest-worker@^26.6.2": "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed", - "jest@^26.6.3": "https://registry.yarnpkg.com/jest/-/jest-26.6.3.tgz#40e8fdbe48f00dfa1f0ce8121ca74b88ac9148ef", + "jest-regex-util@^27.0.1": "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.0.1.tgz#69d4b1bf5b690faa3490113c47486ed85dd45b68", + "jest-resolve-dependencies@^27.0.5": "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.0.5.tgz#819ccdddd909c65acddb063aac3a49e4ba1ed569", + "jest-resolve@^27.0.5": "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.0.5.tgz#937535a5b481ad58e7121eaea46d1424a1e0c507", + "jest-runner@^27.0.5": "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.0.5.tgz#b6fdc587e1a5056339205914294555c554efc08a", + "jest-runtime@^27.0.5": 
"https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.0.5.tgz#cd5d1aa9754d30ddf9f13038b3cb7b95b46f552d", + "jest-serializer@^27.0.1": "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.0.1.tgz#2464d04dcc33fb71dc80b7c82e3c5e8a08cb1020", + "jest-snapshot@^27.0.5": "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.0.5.tgz#6e3b9e8e193685372baff771ba34af631fe4d4d5", + "jest-util@^27.0.0": "https://registry.yarnpkg.com/jest-util/-/jest-util-27.0.2.tgz#fc2c7ace3c75ae561cf1e5fdb643bf685a5be7c7", + "jest-util@^27.0.2": "https://registry.yarnpkg.com/jest-util/-/jest-util-27.0.2.tgz#fc2c7ace3c75ae561cf1e5fdb643bf685a5be7c7", + "jest-validate@^27.0.2": "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.0.2.tgz#7fe2c100089449cd5cbb47a5b0b6cb7cda5beee5", + "jest-watcher@^27.0.2": "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.0.2.tgz#dab5f9443e2d7f52597186480731a8c6335c5deb", + "jest-worker@^27.0.2": "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.0.2.tgz#4ebeb56cef48b3e7514552f80d0d80c0129f0b05", + "jest@^27.0.5": "https://registry.yarnpkg.com/jest/-/jest-27.0.5.tgz#141825e105514a834cc8d6e44670509e8d74c5f2", "js-tokens@^4.0.0": "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499", "js-yaml@^3.13.1": "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537", "js-yaml@^4.1.0": "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602", - "jsbn@~0.1.0": "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513", - "jsdom@^16.4.0": "https://registry.yarnpkg.com/jsdom/-/jsdom-16.5.3.tgz#13a755b3950eb938b4482c407238ddf16f0d2136", + "jsdom@^16.6.0": "https://registry.yarnpkg.com/jsdom/-/jsdom-16.6.0.tgz#f79b3786682065492a3da6a60a4695da983805ac", "jsesc@^2.5.1": "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4", "json-parse-even-better-errors@^2.3.0": "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d", "json-schema-traverse@^0.4.1": "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660", "json-schema-traverse@^1.0.0": "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2", - "json-schema@0.2.3": "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13", "json-stable-stringify-without-jsonify@^1.0.1": "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651", "json-stringify-safe@^5.0.1": "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb", - "json-stringify-safe@~5.0.1": "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb", "json5@2.x": "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3", "json5@^2.1.2": "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3", "jsonfile@^6.0.1": "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae", "jsonparse@^1.2.0": 
"https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280", - "jsprim@^1.2.2": "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2", - "kind-of@^3.0.2": "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64", - "kind-of@^3.0.3": "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64", - "kind-of@^3.2.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64", - "kind-of@^4.0.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57", - "kind-of@^5.0.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d", - "kind-of@^6.0.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd", - "kind-of@^6.0.2": "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd", "kind-of@^6.0.3": "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd", "kleur@^3.0.3": "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e", "leven@^3.1.0": "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2", @@ -601,80 +514,85 @@ "levn@~0.3.0": "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee", "lines-and-columns@^1.1.6": "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00", "lint-staged@^11.0.0": "https://registry.yarnpkg.com/lint-staged/-/lint-staged-11.0.0.tgz#24d0a95aa316ba28e257f5c4613369a75a10c712", - "listr2@^3.8.2": "https://registry.yarnpkg.com/listr2/-/listr2-3.8.2.tgz#99b138ad1cfb08f1b0aacd422972e49b2d814b99", + "listr2@^3.8.2": "https://registry.yarnpkg.com/listr2/-/listr2-3.10.0.tgz#58105a53ed7fa1430d1b738c6055ef7bb006160f", "locate-path@^5.0.0": "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0", "locate-path@^6.0.0": "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286", "lodash.clonedeep@^4.5.0": "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef", + "lodash.merge@^4.6.2": "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a", "lodash.set@^4.3.2": "https://registry.yarnpkg.com/lodash.set/-/lodash.set-4.3.2.tgz#d8757b1da807dde24816b0d6a84bea1a76230b23", "lodash.truncate@^4.4.2": "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193", "lodash@4.x": "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c", "lodash@^4.17.15": "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c", "lodash@^4.17.19": "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c", - "lodash@^4.17.21": "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c", "lodash@^4.7.0": "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c", "log-symbols@^4.1.0": 
"https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503", "log-update@^4.0.0": "https://registry.yarnpkg.com/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1", + "longest-streak@^2.0.0": "https://registry.yarnpkg.com/longest-streak/-/longest-streak-2.0.4.tgz#b8599957da5b5dab64dee3fe316fa774597d90e4", "lru-cache@^6.0.0": "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94", "make-dir@^3.0.0": "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f", "make-error@1.x": "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2", "makeerror@1.0.x": "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c", - "map-cache@^0.2.2": "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf", "map-obj@^1.0.0": "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d", "map-obj@^4.0.0": "https://registry.yarnpkg.com/map-obj/-/map-obj-4.2.1.tgz#e4ea399dbc979ae735c83c863dd31bdf364277b7", - "map-visit@^1.0.0": "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f", - "markdown-escapes@^1.0.0": "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.4.tgz#c95415ef451499d7602b91095f3c8e8975f78535", + "markdown-table@^2.0.0": "https://registry.yarnpkg.com/markdown-table/-/markdown-table-2.0.0.tgz#194a90ced26d31fe753d8b9434430214c011865b", + "mdast-util-find-and-replace@^1.1.0": "https://registry.yarnpkg.com/mdast-util-find-and-replace/-/mdast-util-find-and-replace-1.1.1.tgz#b7db1e873f96f66588c321f1363069abf607d1b5", + "mdast-util-footnote@^0.1.0": "https://registry.yarnpkg.com/mdast-util-footnote/-/mdast-util-footnote-0.1.7.tgz#4b226caeab4613a3362c144c94af0fdd6f7e0ef0", + "mdast-util-from-markdown@^0.8.0": "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.5.tgz#d1ef2ca42bc377ecb0463a987910dae89bd9a28c", + "mdast-util-frontmatter@^0.2.0": "https://registry.yarnpkg.com/mdast-util-frontmatter/-/mdast-util-frontmatter-0.2.0.tgz#8bd5cd55e236c03e204a036f7372ebe9e6748240", + "mdast-util-gfm-autolink-literal@^0.1.0": "https://registry.yarnpkg.com/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.3.tgz#9c4ff399c5ddd2ece40bd3b13e5447d84e385fb7", + "mdast-util-gfm-strikethrough@^0.2.0": "https://registry.yarnpkg.com/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.3.tgz#45eea337b7fff0755a291844fbea79996c322890", + "mdast-util-gfm-table@^0.1.0": "https://registry.yarnpkg.com/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.6.tgz#af05aeadc8e5ee004eeddfb324b2ad8c029b6ecf", + "mdast-util-gfm-task-list-item@^0.1.0": "https://registry.yarnpkg.com/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.6.tgz#70c885e6b9f543ddd7e6b41f9703ee55b084af10", + "mdast-util-gfm@^0.1.0": "https://registry.yarnpkg.com/mdast-util-gfm/-/mdast-util-gfm-0.1.2.tgz#8ecddafe57d266540f6881f5c57ff19725bd351c", + "mdast-util-to-markdown@^0.6.0": "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz#b33f67ca820d69e6cc527a93d4039249b504bebe", + "mdast-util-to-markdown@^0.6.1": "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz#b33f67ca820d69e6cc527a93d4039249b504bebe", + 
"mdast-util-to-markdown@~0.6.0": "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz#b33f67ca820d69e6cc527a93d4039249b504bebe", + "mdast-util-to-string@^2.0.0": "https://registry.yarnpkg.com/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz#b8cfe6a713e1091cb5b728fc48885a4767f8b97b", "memize@^1.1.0": "https://registry.yarnpkg.com/memize/-/memize-1.1.0.tgz#4a5a684ac6992a13b1299043f3e49b1af6a0b0d3", "meow@^8.0.0": "https://registry.yarnpkg.com/meow/-/meow-8.1.2.tgz#bcbe45bda0ee1729d350c03cffc8395a36c4e897", "merge-stream@^2.0.0": "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60", "merge2@^1.3.0": "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae", - "micromatch@^3.1.4": "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23", + "micromark-extension-footnote@^0.3.0": "https://registry.yarnpkg.com/micromark-extension-footnote/-/micromark-extension-footnote-0.3.2.tgz#129b74ef4920ce96719b2c06102ee7abb2b88a20", + "micromark-extension-frontmatter@^0.2.0": "https://registry.yarnpkg.com/micromark-extension-frontmatter/-/micromark-extension-frontmatter-0.2.2.tgz#61b8e92e9213e1d3c13f5a59e7862f5ca98dfa53", + "micromark-extension-gfm-autolink-literal@~0.5.0": "https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz#53866c1f0c7ef940ae7ca1f72c6faef8fed9f204", + "micromark-extension-gfm-strikethrough@~0.6.5": "https://registry.yarnpkg.com/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.5.tgz#96cb83356ff87bf31670eefb7ad7bba73e6514d1", + "micromark-extension-gfm-table@~0.4.0": "https://registry.yarnpkg.com/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.3.tgz#4d49f1ce0ca84996c853880b9446698947f1802b", + "micromark-extension-gfm-tagfilter@~0.3.0": "https://registry.yarnpkg.com/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz#d9f26a65adee984c9ccdd7e182220493562841ad", + "micromark-extension-gfm-task-list-item@~0.3.0": "https://registry.yarnpkg.com/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.3.tgz#d90c755f2533ed55a718129cee11257f136283b8", + "micromark-extension-gfm@^0.3.0": "https://registry.yarnpkg.com/micromark-extension-gfm/-/micromark-extension-gfm-0.3.3.tgz#36d1a4c089ca8bdfd978c9bd2bf1a0cb24e2acfe", + "micromark@^2.11.3": "https://registry.yarnpkg.com/micromark/-/micromark-2.11.4.tgz#d13436138eea826383e822449c9a5c50ee44665a", + "micromark@~2.11.0": "https://registry.yarnpkg.com/micromark/-/micromark-2.11.4.tgz#d13436138eea826383e822449c9a5c50ee44665a", + "micromark@~2.11.3": "https://registry.yarnpkg.com/micromark/-/micromark-2.11.4.tgz#d13436138eea826383e822449c9a5c50ee44665a", "micromatch@^4.0.2": "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9", "micromatch@^4.0.4": "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9", - "mime-db@1.47.0": "https://registry.yarnpkg.com/mime-db/-/mime-db-1.47.0.tgz#8cb313e59965d3c05cfbf898915a267af46a335c", - "mime-types@^2.1.12": "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.30.tgz#6e7be8b4c479825f85ed6326695db73f9305d62d", - "mime-types@~2.1.19": "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.30.tgz#6e7be8b4c479825f85ed6326695db73f9305d62d", + 
"mime-db@1.48.0": "https://registry.yarnpkg.com/mime-db/-/mime-db-1.48.0.tgz#e35b31045dd7eada3aaad537ed88a33afbef2d1d", + "mime-types@^2.1.12": "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.31.tgz#a00d76b74317c61f9c2db2218b8e9f8e9c5c9e6b", "mimic-fn@^2.1.0": "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b", "min-indent@^1.0.0": "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869", "minimatch@^3.0.4": "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083", "minimist-options@4.1.0": "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619", - "minimist@^1.1.1": "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602", - "minimist@^1.2.0": "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602", "minimist@^1.2.5": "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602", - "mixin-deep@^1.2.0": "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566", "mkdirp@1.x": "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e", "moment@^2.29.1": "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3", - "ms@2.0.0": "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8", "ms@2.1.2": "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009", - "nanomatch@^1.2.9": "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119", "natural-compare@^1.4.0": "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7", - "nice-try@^1.0.4": "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366", - "nock@^13.0.11": "https://registry.yarnpkg.com/nock/-/nock-13.0.11.tgz#ba733252e720897ca50033205c39db0c7470f331", + "nock@^13.1.0": "https://registry.yarnpkg.com/nock/-/nock-13.1.0.tgz#41c8ce8b35ab7d618c4cbf40de1d5bce319979ba", "node-fetch@^2.6.1": "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052", "node-int64@^0.4.0": "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b", "node-modules-regexp@^1.0.0": "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40", - "node-notifier@^8.0.0": "https://registry.yarnpkg.com/node-notifier/-/node-notifier-8.0.2.tgz#f3167a38ef0d2c8a866a83e318c1ba0efeb702c5", - "node-releases@^1.1.71": "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.71.tgz#cb1334b179896b1c89ecfdd4b725fb7bbdfc7dbb", + "node-releases@^1.1.71": "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.73.tgz#dd4e81ddd5277ff846b80b52bb40c49edf7a7b20", "normalize-package-data@^2.5.0": "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8", "normalize-package-data@^3.0.0": "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-3.0.2.tgz#cae5c410ae2434f9a6c1baa65d5bc3b9366c8699", - "normalize-path@^2.1.1": 
"https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9", "normalize-path@^3.0.0": "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65", - "npm-run-path@^2.0.0": "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f", - "npm-run-path@^4.0.0": "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea", "npm-run-path@^4.0.1": "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea", "nwsapi@^2.2.0": "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7", - "oauth-sign@~0.9.0": "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455", - "object-copy@^0.1.0": "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c", - "object-visit@^1.0.0": "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb", - "object.pick@^1.3.0": "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747", "once@^1.3.0": "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1", - "once@^1.3.1": "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1", "once@^1.4.0": "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1", "onetime@^5.1.0": "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e", "onetime@^5.1.2": "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e", "optionator@^0.8.1": "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495", "optionator@^0.9.1": "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499", "p-each-series@^2.1.0": "https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.2.0.tgz#105ab0357ce72b202a8a8b94933672657b5e2a9a", - "p-finally@^1.0.0": "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae", "p-limit@^2.2.0": "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1", "p-limit@^3.0.2": "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b", "p-locate@^4.1.0": "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07", @@ -682,41 +600,34 @@ "p-map@^4.0.0": "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b", "p-try@^2.0.0": "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6", "parent-module@^1.0.0": "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2", - "parse-entities@^1.1.0": "https://registry.yarnpkg.com/parse-entities/-/parse-entities-1.2.2.tgz#c31bf0f653b6661354f8973559cb86dd1d5edf50", + "parse-entities@^2.0.0": "https://registry.yarnpkg.com/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8", "parse-json@^5.0.0": 
"https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd", "parse5@6.0.1": "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b", - "pascalcase@^0.1.1": "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14", "path-exists@^4.0.0": "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3", "path-is-absolute@^1.0.0": "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f", - "path-key@^2.0.0": "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40", - "path-key@^2.0.1": "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40", "path-key@^3.0.0": "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375", "path-key@^3.1.0": "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375", - "path-parse@^1.0.6": "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c", + "path-parse@^1.0.6": "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735", "path-type@^4.0.0": "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b", - "performance-now@^2.1.0": "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b", - "picomatch@^2.0.4": "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d", - "picomatch@^2.2.1": "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d", - "picomatch@^2.2.3": "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d", + "picomatch@^2.0.4": "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972", + "picomatch@^2.2.1": "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972", + "picomatch@^2.2.3": "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972", "pinst@^2.1.6": "https://registry.yarnpkg.com/pinst/-/pinst-2.1.6.tgz#8d968b8ec1dac5dddcfc976c735592dbec58b42c", "pirates@^4.0.1": "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87", "pkg-dir@^4.2.0": "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3", "please-upgrade-node@^3.2.0": "https://registry.yarnpkg.com/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz#aeddd3f994c933e4ad98b99d9a556efa0e2fe942", - "posix-character-classes@^0.1.0": "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab", "prelude-ls@^1.2.1": "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396", "prelude-ls@~1.1.2": "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54", "pretty-format@^26.0.0": "https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93", "pretty-format@^26.6.2": 
"https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93", + "pretty-format@^27.0.2": "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.0.2.tgz#9283ff8c4f581b186b2d4da461617143dca478a4", "progress@^2.0.0": "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8", "prompts@^2.0.1": "https://registry.yarnpkg.com/prompts/-/prompts-2.4.1.tgz#befd3b1195ba052f9fd2fde8a486c4e82ee77f61", "propagate@^2.0.0": "https://registry.yarnpkg.com/propagate/-/propagate-2.0.1.tgz#40cdedab18085c792334e64f0ac17256d38f9a45", - "psl@^1.1.28": "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24", "psl@^1.1.33": "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24", - "pump@^3.0.0": "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64", "punycode@^2.1.0": "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec", "punycode@^2.1.1": "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec", "q@^1.5.1": "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7", - "qs@~6.5.2": "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36", "queue-microtask@^1.2.2": "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243", "quick-lru@^4.0.1": "https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f", "react-is@^17.0.1": "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0", @@ -725,86 +636,53 @@ "readable-stream@3": "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198", "readable-stream@^3.0.0": "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198", "redent@^3.0.0": "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f", - "regex-not@^1.0.0": "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c", - "regex-not@^1.0.2": "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c", - "regexpp@^3.0.0": "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2", - "regexpp@^3.1.0": "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2", - "remark-frontmatter@^1.3.3": "https://registry.yarnpkg.com/remark-frontmatter/-/remark-frontmatter-1.3.3.tgz#67ec63c89da5a84bb793ecec166e11b4eb47af10", - "remark-parse@^5.0.0": "https://registry.yarnpkg.com/remark-parse/-/remark-parse-5.0.0.tgz#4c077f9e499044d1d5c13f80d7a98cf7b9285d95", - "remove-trailing-separator@^1.0.1": "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef", - "repeat-element@^1.1.2": "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9", - "repeat-string@^1.5.4": "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637", - "repeat-string@^1.6.1": 
"https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637", - "replace-ext@1.0.0": "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.0.tgz#de63128373fcbf7c3ccfa4de5a480c45a67958eb", - "request-promise-core@1.1.4": "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.4.tgz#3eedd4223208d419867b78ce815167d10593a22f", - "request-promise-native@^1.0.9": "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.9.tgz#e407120526a5efdc9a39b28a5679bf47b9d9dc28", - "request@^2.88.2": "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3", + "regexpp@^3.1.0": "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2", + "remark-footnotes@^3.0.0": "https://registry.yarnpkg.com/remark-footnotes/-/remark-footnotes-3.0.0.tgz#5756b56f8464fa7ed80dbba0c966136305d8cb8d", + "remark-frontmatter@^3.0.0": "https://registry.yarnpkg.com/remark-frontmatter/-/remark-frontmatter-3.0.0.tgz#ca5d996361765c859bd944505f377d6b186a6ec6", + "remark-gfm@^1.0.0": "https://registry.yarnpkg.com/remark-gfm/-/remark-gfm-1.0.0.tgz#9213643001be3f277da6256464d56fd28c3b3c0d", + "remark-parse@^9.0.0": "https://registry.yarnpkg.com/remark-parse/-/remark-parse-9.0.0.tgz#4d20a299665880e4f4af5d90b7c7b8a935853640", + "repeat-string@^1.0.0": "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637", "require-directory@^2.1.1": "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42", "require-from-string@^2.0.2": "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909", - "require-main-filename@^2.0.0": "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b", "resolve-cwd@^3.0.0": "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d", "resolve-from@5.0.0": "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69", "resolve-from@^4.0.0": "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6", "resolve-from@^5.0.0": "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69", "resolve-global@1.0.0": "https://registry.yarnpkg.com/resolve-global/-/resolve-global-1.0.0.tgz#a2a79df4af2ca3f49bf77ef9ddacd322dad19255", "resolve-global@^1.0.0": "https://registry.yarnpkg.com/resolve-global/-/resolve-global-1.0.0.tgz#a2a79df4af2ca3f49bf77ef9ddacd322dad19255", - "resolve-url@^0.2.1": "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a", "resolve@^1.10.0": "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975", - "resolve@^1.18.1": "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975", "resolve@^1.20.0": "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975", "restore-cursor@^3.1.0": "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e", - "ret@~0.1.10": "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc", 
"reusify@^1.0.4": "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76", "rimraf@^3.0.0": "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a", "rimraf@^3.0.2": "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a", - "rsvp@^4.8.4": "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734", "run-parallel@^1.1.9": "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee", "rxjs@^6.6.7": "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9", - "safe-buffer@^5.0.1": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6", - "safe-buffer@^5.1.2": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6", "safe-buffer@~5.1.1": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d", "safe-buffer@~5.2.0": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6", - "safe-regex@^1.1.0": "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e", "safer-buffer@>= 2.1.2 < 3": "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a", - "safer-buffer@^2.0.2": "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a", - "safer-buffer@^2.1.0": "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a", - "safer-buffer@~2.1.0": "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a", - "sane@^4.0.3": "https://registry.yarnpkg.com/sane/-/sane-4.1.0.tgz#ed881fd922733a6c461bc189dc2b6c006f3ffded", "saxes@^5.0.1": "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d", "semver-compare@^1.0.0": "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc", "semver@2 || 3 || 4 || 5": "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7", "semver@7.3.5": "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7", "semver@7.x": "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7", - "semver@^5.5.0": "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7", "semver@^6.0.0": "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d", "semver@^6.3.0": "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d", "semver@^7.2.1": "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7", "semver@^7.3.2": "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7", "semver@^7.3.4": "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7", - "set-blocking@^2.0.0": "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7", - "set-value@^2.0.0": 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b", - "set-value@^2.0.1": "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b", - "shebang-command@^1.2.0": "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea", + "semver@^7.3.5": "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7", "shebang-command@^2.0.0": "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea", - "shebang-regex@^1.0.0": "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3", "shebang-regex@^3.0.0": "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172", "shell-escape@^0.2.0": "https://registry.yarnpkg.com/shell-escape/-/shell-escape-0.2.0.tgz#68fd025eb0490b4f567a027f0bf22480b5f84133", - "shellwords@^0.1.1": "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b", - "signal-exit@^3.0.0": "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c", "signal-exit@^3.0.2": "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c", "signal-exit@^3.0.3": "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c", "sisteransi@^1.0.5": "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed", "slash@^3.0.0": "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634", "slice-ansi@^3.0.0": "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787", "slice-ansi@^4.0.0": "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b", - "snapdragon-node@^2.0.1": "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b", - "snapdragon-util@^3.0.1": "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2", - "snapdragon@^0.8.1": "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d", - "source-map-resolve@^0.5.0": "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a", "source-map-support@^0.5.6": "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61", - "source-map-url@^0.4.0": "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56", "source-map@^0.5.0": "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc", - "source-map@^0.5.6": "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc", "source-map@^0.6.0": "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263", "source-map@^0.6.1": "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263", "source-map@^0.7.3": 
"https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383", @@ -812,17 +690,11 @@ "spdx-correct@^3.0.0": "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9", "spdx-exceptions@^2.1.0": "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d", "spdx-expression-parse@^3.0.0": "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679", - "spdx-license-ids@^3.0.0": "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz#e9c18a410e5ed7e12442a549fbd8afa767038d65", - "split-string@^3.0.1": "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2", - "split-string@^3.0.2": "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2", + "spdx-license-ids@^3.0.0": "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.9.tgz#8a595135def9592bda69709474f1cbeea7c2467f", "split2@^3.0.0": "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f", "sprintf-js@^1.1.2": "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673", "sprintf-js@~1.0.2": "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c", - "sshpk@^1.7.0": "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877", - "stack-utils@^2.0.2": "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277", - "state-toggle@^1.0.0": "https://registry.yarnpkg.com/state-toggle/-/state-toggle-1.0.3.tgz#e123b16a88e143139b09c6852221bc9815917dfe", - "static-extend@^0.1.1": "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6", - "stealthy-require@^1.1.1": "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b", + "stack-utils@^2.0.3": "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277", "string-argv@0.3.1": "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.1.tgz#95e2fbec0427ae19184935f816d74aaa4c5c19da", "string-length@^4.0.1": "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a", "string-width@^4.1.0": "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5", @@ -831,51 +703,39 @@ "stringify-object@^3.3.0": "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629", "strip-ansi@^6.0.0": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532", "strip-bom@^4.0.0": "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878", - "strip-eof@^1.0.0": "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf", "strip-final-newline@^2.0.0": "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad", "strip-indent@^3.0.0": "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001", 
"strip-json-comments@^3.1.0": "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006", "strip-json-comments@^3.1.1": "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006", - "structured-source@^3.0.2": "https://registry.yarnpkg.com/structured-source/-/structured-source-3.0.2.tgz#dd802425e0f53dc4a6e7aca3752901a1ccda7af5", "supports-color@^5.3.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f", "supports-color@^7.0.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da", "supports-color@^7.1.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da", + "supports-color@^8.0.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c", "supports-hyperlinks@^2.0.0": "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb", "symbol-tree@^3.2.4": "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2", - "table@^6.0.4": "https://registry.yarnpkg.com/table/-/table-6.7.0.tgz#26274751f0ee099c547f6cb91d3eff0d61d155b2", + "table@^6.0.9": "https://registry.yarnpkg.com/table/-/table-6.7.1.tgz#ee05592b7143831a8c94f3cee6aae4c1ccef33e2", "terminal-link@^2.0.0": "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994", "test-exclude@^6.0.0": "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e", "text-extensions@^1.0.0": "https://registry.yarnpkg.com/text-extensions/-/text-extensions-1.9.0.tgz#1853e45fee39c945ce6f6c36b2d659b5aabc2a26", "text-table@^0.2.0": "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4", - "throat@^5.0.0": "https://registry.yarnpkg.com/throat/-/throat-5.0.0.tgz#c5199235803aad18754a667d659b5e72ce16764b", + "throat@^6.0.1": "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375", "through2@^4.0.0": "https://registry.yarnpkg.com/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764", "through@>=2.2.7 <3": "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5", "through@^2.3.8": "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5", "tmpl@1.0.x": "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1", "to-fast-properties@^2.0.0": "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e", - "to-object-path@^0.3.0": "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af", - "to-regex-range@^2.1.0": "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38", "to-regex-range@^5.0.1": "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4", - "to-regex@^3.0.1": "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce", - "to-regex@^3.0.2": 
"https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce", - "tough-cookie@^2.3.3": "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2", "tough-cookie@^4.0.0": "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4", - "tough-cookie@~2.5.0": "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2", - "tr46@^2.0.2": "https://registry.yarnpkg.com/tr46/-/tr46-2.0.2.tgz#03273586def1595ae08fedb38d7733cee91d2479", + "tr46@^2.1.0": "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240", "traverse@^0.6.6": "https://registry.yarnpkg.com/traverse/-/traverse-0.6.6.tgz#cbdf560fd7b9af632502fed40f918c157ea97137", - "trim-newlines@^3.0.0": "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.0.tgz#79726304a6a898aa8373427298d54c2ee8b1cb30", + "trim-newlines@^3.0.0": "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144", "trim-off-newlines@^1.0.0": "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz#9f9ba9d9efa8764c387698bcbfeb2c848f11adb3", - "trim-trailing-lines@^1.0.0": "https://registry.yarnpkg.com/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz#bd4abbec7cc880462f10b2c8b5ce1d8d1ec7c2c0", - "trim@0.0.1": "https://registry.yarnpkg.com/trim/-/trim-0.0.1.tgz#5858547f6b290757ee95cccc666fb50084c460dd", "trough@^1.0.0": "https://registry.yarnpkg.com/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406", - "ts-jest@^26.5.6": "https://registry.yarnpkg.com/ts-jest/-/ts-jest-26.5.6.tgz#c32e0746425274e1dfe333f43cd3c800e014ec35", + "ts-jest@^27.0.3": "https://registry.yarnpkg.com/ts-jest/-/ts-jest-27.0.3.tgz#808492f022296cde19390bb6ad627c8126bf93f8", "tslib@^1.8.1": "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00", "tslib@^1.9.0": "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00", - "tsutils@^3.17.1": "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623", - "tunnel-agent@^0.6.0": "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd", + "tsutils@^3.21.0": "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623", "tunnel@0.0.6": "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c", - "tweetnacl@^0.14.3": "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64", - "tweetnacl@~0.14.0": "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64", "type-check@^0.4.0": "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1", "type-check@~0.3.2": "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72", "type-check@~0.4.0": "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1", @@ -886,71 +746,51 @@ "type-fest@^0.6.0": "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b", "type-fest@^0.8.1": "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d", 
"typedarray-to-buffer@^3.1.5": "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080", - "typescript@^4.2.4": "https://registry.yarnpkg.com/typescript/-/typescript-4.2.4.tgz#8610b59747de028fda898a8aef0e103f156d0961", - "unherit@^1.0.4": "https://registry.yarnpkg.com/unherit/-/unherit-1.1.3.tgz#6c9b503f2b41b262330c80e91c8614abdaa69c22", - "unified@^6.2.0": "https://registry.yarnpkg.com/unified/-/unified-6.2.0.tgz#7fbd630f719126d67d40c644b7e3f617035f6dba", - "union-value@^1.0.0": "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847", - "unist-util-is@^3.0.0": "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-3.0.0.tgz#d9e84381c2468e82629e4a5be9d7d05a2dd324cd", - "unist-util-remove-position@^1.0.0": "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-1.1.4.tgz#ec037348b6102c897703eee6d0294ca4755a2020", - "unist-util-stringify-position@^1.0.0": "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-1.1.2.tgz#3f37fcf351279dcbca7480ab5889bb8a832ee1c6", - "unist-util-stringify-position@^1.1.1": "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-1.1.2.tgz#3f37fcf351279dcbca7480ab5889bb8a832ee1c6", - "unist-util-visit-parents@^2.0.0": "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz#25e43e55312166f3348cae6743588781d112c1e9", - "unist-util-visit@^1.1.0": "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-1.4.1.tgz#4724aaa8486e6ee6e26d7ff3c8685960d560b1e3", + "typescript@^4.3.4": "https://registry.yarnpkg.com/typescript/-/typescript-4.3.4.tgz#3f85b986945bcf31071decdd96cf8bfa65f9dcbc", + "unified@^9.2.1": "https://registry.yarnpkg.com/unified/-/unified-9.2.1.tgz#ae18d5674c114021bfdbdf73865ca60f410215a3", + "unist-util-is@^4.0.0": "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-4.1.0.tgz#976e5f462a7a5de73d94b706bac1b90671b57797", + "unist-util-stringify-position@^2.0.0": "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz#cce3bfa1cdf85ba7375d1d5b17bdc4cada9bd9da", + "unist-util-visit-parents@^3.0.0": "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz#65a6ce698f78a6b0f56aa0e88f13801886cdaef6", "universal-user-agent@^6.0.0": "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.0.tgz#3381f8503b251c0d9cd21bc1de939ec9df5480ee", "universalify@^0.1.2": "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66", "universalify@^2.0.0": "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717", - "unset-value@^1.0.0": "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559", "update-section@^0.3.3": "https://registry.yarnpkg.com/update-section/-/update-section-0.3.3.tgz#458f17820d37820dc60e20b86d94391b00123158", "uri-js@^4.2.2": "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e", - "urix@^0.1.0": "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72", - "use@^3.1.0": "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f", "util-deprecate@^1.0.1": 
"https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "uuid@^3.3.2": "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee", - "uuid@^8.3.0": "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2", "v8-compile-cache@^2.0.3": "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee", - "v8-to-istanbul@^7.0.0": "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz#30898d1a7fa0c84d225a2c1434fb958f290883c1", + "v8-to-istanbul@^8.0.0": "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.0.0.tgz#4229f2a99e367f3f018fa1d5c2b8ec684667c69c", "validate-npm-package-license@^3.0.1": "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a", - "verror@1.10.0": "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400", - "vfile-location@^2.0.0": "https://registry.yarnpkg.com/vfile-location/-/vfile-location-2.0.6.tgz#8a274f39411b8719ea5728802e10d9e0dff1519e", - "vfile-message@^1.0.0": "https://registry.yarnpkg.com/vfile-message/-/vfile-message-1.1.1.tgz#5833ae078a1dfa2d96e9647886cd32993ab313e1", - "vfile@^2.0.0": "https://registry.yarnpkg.com/vfile/-/vfile-2.3.0.tgz#e62d8e72b20e83c324bc6c67278ee272488bf84a", + "vfile-message@^2.0.0": "https://registry.yarnpkg.com/vfile-message/-/vfile-message-2.0.4.tgz#5b43b88171d409eae58477d13f23dd41d52c371a", + "vfile@^4.0.0": "https://registry.yarnpkg.com/vfile/-/vfile-4.2.1.tgz#03f1dce28fc625c625bc6514350fbdb00fa9e624", "w3c-hr-time@^1.0.2": "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd", "w3c-xmlserializer@^2.0.0": "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a", "walker@^1.0.7": "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb", - "walker@~1.0.5": "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb", "webidl-conversions@^5.0.0": "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff", "webidl-conversions@^6.1.0": "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514", "whatwg-encoding@^1.0.5": "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0", "whatwg-mimetype@^2.3.0": "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf", - "whatwg-url@^8.0.0": "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.5.0.tgz#7752b8464fc0903fec89aa9846fc9efe07351fd3", - "whatwg-url@^8.5.0": "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.5.0.tgz#7752b8464fc0903fec89aa9846fc9efe07351fd3", - "which-module@^2.0.0": "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a", - "which@^1.2.9": "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a", + "whatwg-url@^8.0.0": "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.6.0.tgz#27c0205a4902084b872aecb97cf0f2a7a3011f4c", + "whatwg-url@^8.5.0": 
"https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.6.0.tgz#27c0205a4902084b872aecb97cf0f2a7a3011f4c", "which@^2.0.1": "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1", - "which@^2.0.2": "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1", "word-wrap@^1.2.3": "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c", "word-wrap@~1.2.3": "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c", "wrap-ansi@^6.2.0": "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53", "wrap-ansi@^7.0.0": "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43", "wrappy@1": "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f", "write-file-atomic@^3.0.0": "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8", - "ws@^7.4.4": "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1", - "x-is-string@^0.1.0": "https://registry.yarnpkg.com/x-is-string/-/x-is-string-0.1.0.tgz#474b50865af3a49a9c4657f05acd145458f77d82", + "ws@^7.4.5": "https://registry.yarnpkg.com/ws/-/ws-7.5.0.tgz#0033bafea031fb9df041b2026fc72a571ca44691", "xml-name-validator@^3.0.0": "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a", "xmlchars@^2.2.0": "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb", - "xtend@^4.0.0": "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54", - "xtend@^4.0.1": "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54", - "y18n@^4.0.0": "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf", "y18n@^5.0.5": "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55", "yallist@^4.0.0": "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72", "yaml@^1.10.0": "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b", - "yargs-parser@20.x": "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a", - "yargs-parser@^18.1.2": "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0", - "yargs-parser@^20.2.2": "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a", - "yargs-parser@^20.2.3": "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a", - "yargs@^15.4.1": "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8", + "yargs-parser@20.x": "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee", + "yargs-parser@^20.2.2": "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee", + "yargs-parser@^20.2.3": "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee", + "yargs@^16.0.3": 
"https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66", "yargs@^16.2.0": "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66", - "yocto-queue@^0.1.0": "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + "yocto-queue@^0.1.0": "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b", + "zwitch@^1.0.0": "https://registry.yarnpkg.com/zwitch/-/zwitch-1.0.5.tgz#d11d7381ffed16b742f6af7b3f223d5cd9fe9920" }, "files": [], "artifacts": {} diff --git a/node_modules/@actions/core/README.md b/node_modules/@actions/core/README.md index 864d577e..deffaa5d 100644 --- a/node_modules/@actions/core/README.md +++ b/node_modules/@actions/core/README.md @@ -16,11 +16,14 @@ import * as core from '@actions/core'; #### Inputs/Outputs -Action inputs can be read with `getInput`. Outputs can be set with `setOutput` which makes them available to be mapped into inputs of other actions to ensure they are decoupled. +Action inputs can be read with `getInput` which returns a `string` or `getBooleanInput` which parses a boolean based on the [yaml 1.2 specification](https://yaml.org/spec/1.2/spec.html#id2804923). If `required` set to be false, the input should have a default value in `action.yml`. + +Outputs can be set with `setOutput` which makes them available to be mapped into inputs of other actions to ensure they are decoupled. ```js const myInput = core.getInput('inputName', { required: true }); - +const myBooleanInput = core.getBooleanInput('booleanInputName', { required: true }); +const myMultilineInput = core.getMultilineInput('multilineInputName', { required: true }); core.setOutput('outputKey', 'outputVal'); ``` @@ -66,7 +69,6 @@ catch (err) { Note that `setNeutral` is not yet implemented in actions V2 but equivalent functionality is being planned. - #### Logging Finally, this library provides some utilities for logging. Note that debug logging is hidden from the logs by default. This behavior can be toggled by enabling the [Step Debug Logs](../../docs/action-debugging.md#step-debug-logs). @@ -118,6 +120,7 @@ const result = await core.group('Do something async', async () => { Colored output is supported in the Action logs via standard [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code). 3/4 bit, 8 bit and 24 bit colors are all supported. 
Foreground colors: + ```js // 3/4 bit core.info('\u001b[35mThis foreground will be magenta') @@ -130,6 +133,7 @@ core.info('\u001b[38;2;255;0;0mThis foreground will be bright red') ``` Background colors: + ```js // 3/4 bit core.info('\u001b[43mThis background will be yellow'); @@ -156,6 +160,7 @@ core.info('\u001b[31;46mRed foreground with a cyan background and \u001b[1mbold ``` > Note: Escape codes reset at the start of each line + ```js core.info('\u001b[35mThis foreground will be magenta') core.info('This foreground will reset to the default') @@ -170,9 +175,10 @@ core.info(style.color.ansi16m.hex('#abcdef') + 'Hello world!') #### Action state -You can use this library to save state and get state for sharing information between a given wrapper action: +You can use this library to save state and get state for sharing information between a given wrapper action: + +**action.yml**: -**action.yml** ```yaml name: 'Wrapper action sample' inputs: @@ -193,6 +199,7 @@ core.saveState("pidToKill", 12345); ``` In action's `cleanup.js`: + ```js const core = require('@actions/core'); diff --git a/node_modules/@actions/core/lib/command.js b/node_modules/@actions/core/lib/command.js index 10bf3ebb..0b28c66b 100644 --- a/node_modules/@actions/core/lib/command.js +++ b/node_modules/@actions/core/lib/command.js @@ -1,12 +1,25 @@ "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); +exports.issue = exports.issueCommand = void 0; const os = __importStar(require("os")); const utils_1 = require("./utils"); /** diff --git a/node_modules/@actions/core/lib/command.js.map b/node_modules/@actions/core/lib/command.js.map index a95b303b..51c7c637 100644 --- a/node_modules/@actions/core/lib/command.js.map +++ b/node_modules/@actions/core/lib/command.js.map @@ -1 +1 @@ 
-{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,UAAkB,EAAE;IACtD,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"} \ No newline at end of file 
+{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,OAAO,GAAG,EAAE;IAC9C,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/core.d.ts b/node_modules/@actions/core/lib/core.d.ts index 8bb5093c..67352513 100644 --- a/node_modules/@actions/core/lib/core.d.ts +++ b/node_modules/@actions/core/lib/core.d.ts @@ -4,6 +4,8 @@ export interface InputOptions { /** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */ required?: boolean; + /** Optional. Whether leading/trailing whitespace will be trimmed for the input. Defaults to true */ + trimWhitespace?: boolean; } /** * The code to exit an action @@ -35,13 +37,35 @@ export declare function setSecret(secret: string): void; */ export declare function addPath(inputPath: string): void; /** - * Gets the value of an input. The value is also trimmed. + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. * @returns string */ export declare function getInput(name: string, options?: InputOptions): string; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +export declare function getMultilineInput(name: string, options?: InputOptions): string[]; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. 
+ * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +export declare function getBooleanInput(name: string, options?: InputOptions): boolean; /** * Sets the value of an output. * diff --git a/node_modules/@actions/core/lib/core.js b/node_modules/@actions/core/lib/core.js index 9bf191a2..f9dbee38 100644 --- a/node_modules/@actions/core/lib/core.js +++ b/node_modules/@actions/core/lib/core.js @@ -1,4 +1,23 @@ "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -8,14 +27,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); +exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = require("./command"); const file_command_1 = require("./file-command"); const utils_1 = require("./utils"); @@ -82,7 +95,9 @@ function addPath(inputPath) { } exports.addPath = addPath; /** - * Gets the value of an input. The value is also trimmed. + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. @@ -93,9 +108,49 @@ function getInput(name, options) { if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } + if (options && options.trimWhitespace === false) { + return val; + } return val.trim(); } exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + return inputs; +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * diff --git a/node_modules/@actions/core/lib/core.js.map b/node_modules/@actions/core/lib/core.js.map index 0198697a..8bd42c0d 100644 --- a/node_modules/@actions/core/lib/core.js.map +++ b/node_modules/@actions/core/lib/core.js.map @@ -1 +1 @@ -{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAA+D;AAC/D,mCAAsC;AAEtC,uCAAwB;AACxB,2CAA4B;AAU5B;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAED,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,sBAAc,CAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,MAAM,SAAS,GAAG,qCAAqC,CAAA;QACvD,MAAM,YAAY,GAAG,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,YAAY,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;QACzF,2BAAgB,CAAC,KAAK,EAAE,YAAY,CAAC,CAAA;KACtC;SAAM;QACL,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;KAC9C;AACH,CAAC;AAZD,wCAYC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,2BAAgB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;GAMG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AARD,4BAQC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;IAC5B,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAHD,8BAGC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,eAAK,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,
OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAuB;IAC3C,eAAK,CAAC,OAAO,EAAE,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAA;AACzE,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,OAAuB;IAC7C,eAAK,CAAC,SAAS,EAAE,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAA;AAC3E,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC"} \ No newline at end of file +{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAA+D;AAC/D,mCAAsC;AAEtC,uCAAwB;AACxB,2CAA4B;AAa5B;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAED,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,sBAAc,CAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,MAAM,SAAS,GAAG,qCAAqC,CAAA;QACvD,MAAM,YAAY,GAAG,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,YAAY,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;QACzF,2BAAgB,CAAC,KAAK,EAAE,YAAY,CAAC,CAAA;KACtC;SAAM;QACL,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;KAC9C;AACH,CAAC;AAZD,wCAYC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,2BAAgB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;;;GAQG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,GAAG,CAAA;KACX;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AAZD,4BAYC;AAED;;;;;;;GAOG;AACH,SAAgB,iBAAiB,CAC/B,IAAY,EACZ,OAAsB;IAEtB,MAAM,MAAM,GAAa,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;SAC7C,KAAK,CAAC,IAAI,CAAC;SACX,MAAM,CAAC,CAAC,CAAC,E
AAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IAExB,OAAO,MAAM,CAAA;AACf,CAAC;AATD,8CASC;AAED;;;;;;;;;GASG;AACH,SAAgB,eAAe,CAAC,IAAY,EAAE,OAAsB;IAClE,MAAM,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;IAC1C,MAAM,UAAU,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,IAAI,CAAA;IACxC,IAAI,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAC1C,MAAM,IAAI,SAAS,CACjB,6DAA6D,IAAI,IAAI;QACnE,4EAA4E,CAC/E,CAAA;AACH,CAAC;AAVD,0CAUC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;IAC5B,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAHD,8BAGC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,eAAK,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAuB;IAC3C,eAAK,CAAC,OAAO,EAAE,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAA;AACzE,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,OAAuB;IAC7C,eAAK,CAAC,SAAS,EAAE,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAA;AAC3E,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/file-command.js b/node_modules/@actions/core/lib/file-command.js index 10783c0c..55e3e9f8 100644 --- a/node_modules/@actions/core/lib/file-command.js +++ b/node_modules/@actions/core/lib/file-command.js @@ -1,13 +1,26 @@ "use strict"; // For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); +exports.issueCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(require("fs")); diff --git a/node_modules/@actions/core/lib/file-command.js.map b/node_modules/@actions/core/lib/file-command.js.map index 45fd8c4b..ee35699f 100644 --- a/node_modules/@actions/core/lib/file-command.js.map +++ b/node_modules/@actions/core/lib/file-command.js.map @@ -1 +1 @@ -{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,uCAAwB;AACxB,uCAAwB;AACxB,mCAAsC;AAEtC,SAAgB,YAAY,CAAC,OAAe,EAAE,OAAY;IACxD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,sBAAc,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,oCAcC"} \ No newline at end of file +{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;;;;;;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,uCAAwB;AACxB,uCAAwB;AACxB,mCAAsC;AAEtC,SAAgB,YAAY,CAAC,OAAe,EAAE,OAAY;IACxD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,sBAAc,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,oCAcC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/utils.js b/node_modules/@actions/core/lib/utils.js index 97cea339..e83052ed 100644 --- a/node_modules/@actions/core/lib/utils.js +++ b/node_modules/@actions/core/lib/utils.js @@ -2,6 +2,7 @@ // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", { value: true }); +exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string diff --git a/node_modules/@actions/core/lib/utils.js.map b/node_modules/@actions/core/lib/utils.js.map index ce43f037..6b68d957 100644 --- a/node_modules/@actions/core/lib/utils.js.map +++ b/node_modules/@actions/core/lib/utils.js.map @@ -1 +1 @@ 
-{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";AAAA,mCAAmC;AACnC,uDAAuD;;AAEvD;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC"} \ No newline at end of file +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";AAAA,mCAAmC;AACnC,uDAAuD;;;AAEvD;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC"} \ No newline at end of file diff --git a/node_modules/@actions/core/package.json b/node_modules/@actions/core/package.json index a0e72b31..94037492 100644 --- a/node_modules/@actions/core/package.json +++ b/node_modules/@actions/core/package.json @@ -1,6 +1,6 @@ { "name": "@actions/core", - "version": "1.2.7", + "version": "1.4.0", "description": "Actions core lib", "keywords": [ "github", diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/LICENSE b/node_modules/@actions/github/LICENSE.md similarity index 95% rename from node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/LICENSE rename to node_modules/@actions/github/LICENSE.md index c61fbbe5..dbae2edb 100644 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/LICENSE +++ b/node_modules/@actions/github/LICENSE.md @@ -1,4 +1,6 @@ -Copyright 2020 Gregor Martynus +The MIT License (MIT) + +Copyright 2019 GitHub Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: diff --git a/node_modules/@actions/github/README.md b/node_modules/@actions/github/README.md index 02e9be07..21daad31 100644 --- a/node_modules/@actions/github/README.md +++ b/node_modules/@actions/github/README.md @@ -22,7 +22,7 @@ async function run() { // You can also pass in additional options as a second parameter to getOctokit // const octokit = github.getOctokit(myToken, {userAgent: "MyActionVersion1"}); - const { data: pullRequest } = await octokit.pulls.get({ + const { data: pullRequest } = await octokit.rest.pulls.get({ owner: 'octokit', repo: 'rest.js', pull_number: 123, @@ -50,7 +50,7 @@ const github = require('@actions/github'); const context = github.context; -const newIssue = await octokit.issues.create({ +const newIssue = await octokit.rest.issues.create({ ...context.repo, title: 'New issue!', body: 'Hello Universe!' 
@@ -90,7 +90,7 @@ const octokit = GitHub.plugin(enterpriseServer220Admin) const myToken = core.getInput('myToken'); const myOctokit = new octokit(getOctokitOptions(token)) // Create a new user -myOctokit.enterpriseAdmin.createUser({ +myOctokit.rest.enterpriseAdmin.createUser({ login: "testuser", email: "testuser@test.com", }); diff --git a/node_modules/@actions/github/lib/context.d.ts b/node_modules/@actions/github/lib/context.d.ts index daab690c..7d3a7de4 100644 --- a/node_modules/@actions/github/lib/context.d.ts +++ b/node_modules/@actions/github/lib/context.d.ts @@ -13,6 +13,9 @@ export declare class Context { job: string; runNumber: number; runId: number; + apiUrl: string; + serverUrl: string; + graphqlUrl: string; /** * Hydrate the context from the environment */ diff --git a/node_modules/@actions/github/lib/context.js b/node_modules/@actions/github/lib/context.js index dd4d10a0..767933ce 100644 --- a/node_modules/@actions/github/lib/context.js +++ b/node_modules/@actions/github/lib/context.js @@ -8,6 +8,7 @@ class Context { * Hydrate the context from the environment */ constructor() { + var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { @@ -27,6 +28,9 @@ class Context { this.job = process.env.GITHUB_JOB; this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; diff --git a/node_modules/@actions/github/lib/context.js.map b/node_modules/@actions/github/lib/context.js.map index 9c4eafe1..91fb9a9d 100644 --- a/node_modules/@actions/github/lib/context.js.map +++ b/node_modules/@actions/github/lib/context.js.map @@ -1 +1 @@ 
-{"version":3,"file":"context.js","sourceRoot":"","sources":["../src/context.ts"],"names":[],"mappings":";;;AAEA,2BAA2C;AAC3C,2BAAsB;AAEtB,MAAa,OAAO;IAgBlB;;OAEG;IACH;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,IAAI,eAAU,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CACvB,iBAAY,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAChE,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAA;gBAC1C,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,IAAI,kBAAkB,QAAG,EAAE,CAAC,CAAA;aACvE;SACF;QACD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,iBAA2B,CAAA;QACxD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAyB,CAAA;QACrD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,aAAuB,CAAA;QACjD,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,YAAsB,CAAA;QAC/C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAA2B,EAAE,EAAE,CAAC,CAAA;QACtE,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,aAAuB,EAAE,EAAE,CAAC,CAAA;IAChE,CAAC;IAED,IAAI,KAAK;QACP,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAA;QAE5B,uCACK,IAAI,CAAC,IAAI,KACZ,MAAM,EAAE,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,CAAC,MAAM,IAClE;IACH,CAAC;IAED,IAAI,IAAI;QACN,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YAC9D,OAAO,EAAC,KAAK,EAAE,IAAI,EAAC,CAAA;SACrB;QAED,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YAC3B,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK;gBAC1C,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI;aACnC,CAAA;SACF;QAED,MAAM,IAAI,KAAK,CACb,kFAAkF,CACnF,CAAA;IACH,CAAC;CACF;AApED,0BAoEC"} \ No newline at end of file 
+{"version":3,"file":"context.js","sourceRoot":"","sources":["../src/context.ts"],"names":[],"mappings":";;;AAEA,2BAA2C;AAC3C,2BAAsB;AAEtB,MAAa,OAAO;IAmBlB;;OAEG;IACH;;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,IAAI,eAAU,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CACvB,iBAAY,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAChE,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAA;gBAC1C,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,IAAI,kBAAkB,QAAG,EAAE,CAAC,CAAA;aACvE;SACF;QACD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,iBAA2B,CAAA;QACxD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAyB,CAAA;QACrD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,aAAuB,CAAA;QACjD,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,YAAsB,CAAA;QAC/C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAA2B,EAAE,EAAE,CAAC,CAAA;QACtE,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,aAAuB,EAAE,EAAE,CAAC,CAAA;QAC9D,IAAI,CAAC,MAAM,SAAG,OAAO,CAAC,GAAG,CAAC,cAAc,mCAAI,wBAAwB,CAAA;QACpE,IAAI,CAAC,SAAS,SAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,mCAAI,oBAAoB,CAAA;QACtE,IAAI,CAAC,UAAU,SACb,OAAO,CAAC,GAAG,CAAC,kBAAkB,mCAAI,gCAAgC,CAAA;IACtE,CAAC;IAED,IAAI,KAAK;QACP,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAA;QAE5B,uCACK,IAAI,CAAC,IAAI,KACZ,MAAM,EAAE,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,CAAC,MAAM,IAClE;IACH,CAAC;IAED,IAAI,IAAI;QACN,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YAC9D,OAAO,EAAC,KAAK,EAAE,IAAI,EAAC,CAAA;SACrB;QAED,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YAC3B,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK;gBAC1C,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI;aACnC,CAAA;SACF;QAED,MAAM,IAAI,KAAK,CACb,kFAAkF,CACnF,CAAA;IACH,CAAC;CACF;AA3ED,0BA2EC"} \ No newline at end of file diff --git a/node_modules/@actions/github/lib/github.js b/node_modules/@actions/github/lib/github.js index e30e81ec..f02c9fb6 100644 --- a/node_modules/@actions/github/lib/github.js +++ b/node_modules/@actions/github/lib/github.js @@ -14,7 +14,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; diff --git a/node_modules/@actions/github/lib/internal/utils.js b/node_modules/@actions/github/lib/internal/utils.js index 197a441e..175a4dae 100644 --- a/node_modules/@actions/github/lib/internal/utils.js +++ b/node_modules/@actions/github/lib/internal/utils.js @@ -14,7 +14,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; diff --git a/node_modules/@actions/github/lib/utils.d.ts b/node_modules/@actions/github/lib/utils.d.ts index 0015a35f..fe28cbd3 100644 --- a/node_modules/@actions/github/lib/utils.d.ts +++ b/node_modules/@actions/github/lib/utils.d.ts @@ -2,14 +2,7 @@ import * as Context from './context'; import { Octokit } from '@octokit/core'; import { OctokitOptions } from '@octokit/core/dist-types/types'; export declare const context: Context.Context; -export declare const GitHub: (new (...args: any[]) => { - [x: string]: any; -}) & { - new (...args: any[]): { - [x: string]: any; - }; - plugins: any[]; -} & typeof Octokit & import("@octokit/core/dist-types/types").Constructor; /** diff --git a/node_modules/@actions/github/lib/utils.js b/node_modules/@actions/github/lib/utils.js index b066c225..afb40e95 100644 --- a/node_modules/@actions/github/lib/utils.js +++ b/node_modules/@actions/github/lib/utils.js @@ -14,7 +14,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/README.md b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/README.md deleted file mode 100644 index f4a7bbd8..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/README.md +++ /dev/null @@ -1,74 +0,0 @@ -# plugin-rest-endpoint-methods.js - -> Octokit plugin adding one method for all of api.github.com REST API endpoints - -[![@latest](https://img.shields.io/npm/v/@octokit/plugin-rest-endpoint-methods.svg)](https://www.npmjs.com/package/@octokit/plugin-rest-endpoint-methods) -[![Build Status](https://github.com/octokit/plugin-rest-endpoint-methods.js/workflows/Test/badge.svg)](https://github.com/octokit/plugin-rest-endpoint-methods.js/actions?workflow=Test) - -## Usage - - - - - - -
-Browsers - - -Load `@octokit/plugin-rest-endpoint-methods` and [`@octokit/core`](https://github.com/octokit/core.js) (or core-compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev) - -```html - -``` - -
-Node - - -Install with `npm install @octokit/core @octokit/plugin-rest-endpoint-methods`. Optionally replace `@octokit/core` with a compatible module - -```js -const { Octokit } = require("@octokit/core"); -const { - restEndpointMethods, -} = require("@octokit/plugin-rest-endpoint-methods"); -``` - -
- -```js -const MyOctokit = Octokit.plugin(restEndpointMethods); -const octokit = new MyOctokit({ auth: "secret123" }); - -// https://developer.github.com/v3/users/#get-the-authenticated-user -octokit.rest.users.getAuthenticated(); -``` - -There is one method for each REST API endpoint documented at [https://developer.github.com/v3](https://developer.github.com/v3). All endpoint methods are documented in the [docs/](docs/) folder, e.g. [docs/users/getAuthenticated.md](docs/users/getAuthenticated.md) - -## TypeScript - -Parameter and response types for all endpoint methods exported as `{ RestEndpointMethodTypes }`. - -Example - -```ts -import { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods"; - -type UpdateLabelParameters = RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]; -type UpdateLabelResponse = RestEndpointMethodTypes["issues"]["updateLabel"]["response"]; -``` - -In order to get types beyond parameters and responses, check out [`@octokit/openapi-types`](https://github.com/octokit/openapi-types.ts/#readme), which is a direct transpliation from GitHub's official OpenAPI specification. - -## Contributing - -See [CONTRIBUTING.md](CONTRIBUTING.md) - -## License - -[MIT](LICENSE) diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js deleted file mode 100644 index abffde80..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js +++ /dev/null @@ -1,1229 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - if (enumerableOnly) symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? 
arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -const Endpoints = { - actions: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], - getJobForWorkflowRun: ["GET 
/repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], 
- getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { - mediaType: { - previews: ["corsair"] - } - }], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET 
/marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { - renamedParameters: { - alert_id: "alert_number" - } - }], - getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getConductCode: ["GET /codes_of_conduct/{key}", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }] - }, - emojis: { - get: ["GET /emojis"] - }, - enterpriseAdmin: { - disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - 
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], - getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], - getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], - listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], - setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], - setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], - setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { - renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] - }], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], - removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { - renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] - }], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - 
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { - renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { - mediaType: { - previews: ["mockingbird"] - } - }], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE 
/user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForAuthenticatedUser: ["GET /user/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForOrg: ["GET /orgs/{org}/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - 
listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], - deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], - deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] - }], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { - renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] - }], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], - getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], - getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], - getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], - getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], - getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], - restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], - restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], - restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - createCard: ["POST /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - createColumn: ["POST 
/projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - createForAuthenticatedUser: ["POST /user/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForOrg: ["POST /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForRepo: ["POST /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - delete: ["DELETE /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteCard: ["DELETE /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteColumn: ["DELETE /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - get: ["GET /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getCard: ["GET /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getColumn: ["GET /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { - mediaType: { - previews: ["inertia"] - } - }], - listCards: ["GET /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - listCollaborators: ["GET /projects/{project_id}/collaborators", { - mediaType: { - previews: ["inertia"] - } - }], - listColumns: ["GET /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - listForOrg: ["GET /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForRepo: ["GET /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForUser: ["GET /users/{username}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - moveCard: ["POST /projects/columns/cards/{card_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - moveColumn: ["POST /projects/columns/{column_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - update: ["PATCH /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateCard: ["PATCH /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateColumn: ["PATCH /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET 
/repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { - mediaType: { - previews: ["lydian"] - } - }], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteLegacy: ["DELETE /reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }, { - deprecated: "octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy" - }], - listForCommitComment: ["GET 
/repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { - mediaType: { - previews: ["baptiste"] - } - }], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - 
deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { - renamed: ["repos", "downloadZipballArchive"] - }], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: 
["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { - mediaType: { - previews: ["groot"] - } - }], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { - mediaType: { - previews: ["groot"] - } - }], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - 
listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusCheckProtection"] - }], - updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits", { - mediaType: { - previews: ["cloak"] - } - }], - issuesAndPullRequests: ["GET 
/search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics", { - mediaType: { - previews: ["mercy"] - } - }], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] - }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - 
createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; - -const VERSION = "4.15.1"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - - return newMethods; -} - -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); - } - - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; - } - - delete options[name]; - } - } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - - return requestWithDefaults(...args); - } - - return Object.assign(withDecorations, requestWithDefaults); -} - -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return _objectSpread2(_objectSpread2({}, api), {}, { - rest: api - }); -} -restEndpointMethods.VERSION = VERSION; - -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map deleted file mode 100644 index 6fee470a..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET 
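The file deleted above, dist-node/index.js, is the bundled @octokit/plugin-rest-endpoint-methods (VERSION "4.15.1"). Each scope entry is a [route, defaults?, decorations?] tuple; endpointsToMethods() splits the route string into an HTTP method and a URL template and wraps it with octokit.request.defaults(), while decorate() handles the mapToData, renamed, renamedParameters and deprecated cases, and restEndpointMethods() exposes the generated methods both at the top level and under the rest namespace. A minimal usage sketch of how a consumer reaches these generated methods, assuming the getOctokit() helper exported by the bundled @actions/github, a token in GITHUB_TOKEN, and placeholder owner/repo values:

    // sketch only: assumes a valid token is available in GITHUB_TOKEN
    const github = require("@actions/github");

    async function main() {
      const octokit = github.getOctokit(process.env.GITHUB_TOKEN);

      // "pulls.list" resolves to the ["GET /repos/{owner}/{repo}/pulls"] tuple above,
      // so this call issues GET /repos/octocat/hello-world/pulls?state=open
      const { data: pulls } = await octokit.rest.pulls.list({
        owner: "octocat",     // placeholder owner
        repo: "hello-world",  // placeholder repository
        state: "open",
      });
      console.log(`open pull requests: ${pulls.length}`);

      // a mapToData-decorated method: decorate() moves the "contexts" array into the
      // request body before the request is sent
      // await octokit.rest.repos.addStatusCheckContexts({
      //   owner: "octocat", repo: "hello-world", branch: "main",
      //   contexts: ["ci/build"],
      // });
    }

    main().catch((error) => {
      console.error(error);
      process.exit(1);
    });

Because restEndpointMethods() returns Object.assign({}, api, { rest: api }), the same generated method is also reachable without the rest prefix, e.g. octokit.pulls.list(...). The diff continues below with the deleted source map for this bundle, whose embedded sourcesContent repeats the same endpoint definitions in their dist-src form.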
/repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET 
/repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\n \"POST /content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: 
[\"corsair\"] } },\n ],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestSuite: [\n \"POST 
/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\n \"GET /codes_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getConductCode: [\n \"GET /codes_of_conduct/{key}\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getForRepo: [\n \"GET /repos/{owner}/{repo}/community/code_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST 
/repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n 
listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n { mediaType: { previews: [\"mockingbird\"] } },\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getStatusForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForAuthenticatedUser: [\n \"GET /user/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n mapCommitAuthor: [\"PATCH 
/repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n 
],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\n \"PUT /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createCard: [\n \"POST /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createColumn: [\n \"POST /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForAuthenticatedUser: [\n \"POST /user/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForOrg: [\n \"POST /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForRepo: [\n \"POST /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n delete: [\n \"DELETE /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteCard: [\n \"DELETE /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteColumn: [\n \"DELETE /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n get: [\n \"GET /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getCard: [\n \"GET /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getColumn: [\n \"GET /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getPermissionForUser: [\n 
\"GET /projects/{project_id}/collaborators/{username}/permission\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCards: [\n \"GET /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCollaborators: [\n \"GET /projects/{project_id}/collaborators\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listColumns: [\n \"GET /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForRepo: [\n \"GET /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForUser: [\n \"GET /users/{username}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveCard: [\n \"POST /projects/columns/cards/{card_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveColumn: [\n \"POST /projects/columns/{column_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n update: [\n \"PATCH /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateCard: [\n \"PATCH /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateColumn: [\n \"PATCH /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH 
/repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n { mediaType: { previews: [\"lydian\"] } },\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteLegacy: [\n \"DELETE /reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n {\n deprecated: \"octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy\",\n },\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { 
mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\n \"POST /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n { mediaType: { previews: [\"baptiste\"] } },\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE 
/repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\n \"DELETE /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\n \"GET /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET 
/repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\n \"PUT /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: 
[\"GET /search/commits\", { mediaType: { previews: [\"cloak\"] } }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", { mediaType: { previews: [\"mercy\"] } }],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT 
/user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"4.15.1\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore 
https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\n"],"names":["Endpoints","actions","addSelectedRepoToOrgSecret","cancelWorkflowRun","createOrUpdateEnvironmentSecret","createOrUpdateOrgSecret","createOrUpdateRepoSecret","createRegistrationTokenForOrg","createRegistrationTokenForRepo","createRemoveTokenForOrg","createRemoveTokenForRepo","createWorkflowDispatch","deleteArtifact","deleteEnvironmentSecret","deleteOrgSecret","deleteRepoSecret","deleteSelfHostedRunnerFromOrg","deleteSelfHostedRunnerFromRepo","deleteWorkflowRun","deleteWorkflowRunLogs","disableSelectedRepositoryGithubActionsOrganization","disableWorkflow","downloadArtifact","downloadJobLogsForWorkflowRun","downloadWorkflowRunLogs","enableSelectedRepositoryGithubActionsOrganization","enableWorkflow","getAllowedActionsOrganization","getAllowedActionsRepository","getArtifact","getEnvironmentPublicKey","getEnvironmentSecret","getGithubActionsPermissionsOrganization","getGithubActionsPermissionsRepository","getJobForWorkflowRun","getOrgPublicKey","getOrgSecret","getPendingDeploymentsForRun","getRepoPermissions","renamed","getRepoPublicKey","getRepoSecret","getReviewsForRun","getSelfHostedRunnerForOrg","getSelfHostedRunnerForRepo","getWorkflow","getWorkflowRun","getWorkflowRunUsage","getWorkflowUsage","listArtifactsForRepo","listEnvironmentSecrets","listJobsForWorkflowRun","listOrgSecrets","listRepoSecrets","listRepoWorkflows","listRunnerApplicationsForOrg","listRunnerApplicationsForRepo","listSelectedReposForOrgSecret","listSelectedRepositoriesEnabledGithubActionsOrganization","listSelfHostedRunnersForOrg","listSelfHostedRunnersForRepo","listWorkflowRunArtifacts","listWorkflowRuns","listWorkflowRunsForRepo","reRunWorkflow","removeSelectedRepoFromOrgSecret","reviewPendingDeploymentsForRun","setAllowedActionsOrganization","setAllowedActionsRepository","setGithubActionsPermissionsOrganization","setGithubActionsPermissionsRepository","setSelectedReposForOrgSecret","setSelectedRepositoriesEnabledGithubActionsOrganization","activity","checkRepoIsStarredByAuthenticatedUser","deleteRepoSubscription","deleteThreadSubscription","getFeeds","getRepoSubscription","getThread","getThreadSubscriptionForAuthenticatedUser","listEventsForAuthenticatedUser","listNotificationsForAuthenticatedUser","listOrgEventsForAuthenticatedUser","listPublicEvents","listPublicEventsForRepoNetwork","listPublicEventsForUser","listPublicOrgEvents","listReceivedEventsForUser","listReceivedPublicEventsForUser","listRepoEvents","listRepoNotificationsForAuthenticatedUser","listReposStarredByAuthenticatedUser","listReposStarredByUser","lis
tReposWatchedByUser","listStargazersForRepo","listWatchedReposForAuthenticatedUser","listWatchersForRepo","markNotificationsAsRead","markRepoNotificationsAsRead","markThreadAsRead","setRepoSubscription","setThreadSubscription","starRepoForAuthenticatedUser","unstarRepoForAuthenticatedUser","apps","addRepoToInstallation","checkToken","createContentAttachment","mediaType","previews","createFromManifest","createInstallationAccessToken","deleteAuthorization","deleteInstallation","deleteToken","getAuthenticated","getBySlug","getInstallation","getOrgInstallation","getRepoInstallation","getSubscriptionPlanForAccount","getSubscriptionPlanForAccountStubbed","getUserInstallation","getWebhookConfigForApp","listAccountsForPlan","listAccountsForPlanStubbed","listInstallationReposForAuthenticatedUser","listInstallations","listInstallationsForAuthenticatedUser","listPlans","listPlansStubbed","listReposAccessibleToInstallation","listSubscriptionsForAuthenticatedUser","listSubscriptionsForAuthenticatedUserStubbed","removeRepoFromInstallation","resetToken","revokeInstallationAccessToken","scopeToken","suspendInstallation","unsuspendInstallation","updateWebhookConfigForApp","billing","getGithubActionsBillingOrg","getGithubActionsBillingUser","getGithubPackagesBillingOrg","getGithubPackagesBillingUser","getSharedStorageBillingOrg","getSharedStorageBillingUser","checks","create","createSuite","get","getSuite","listAnnotations","listForRef","listForSuite","listSuitesForRef","rerequestSuite","setSuitesPreferences","update","codeScanning","deleteAnalysis","getAlert","renamedParameters","alert_id","getAnalysis","getSarif","listAlertsForRepo","listAlertsInstances","listRecentAnalyses","updateAlert","uploadSarif","codesOfConduct","getAllCodesOfConduct","getConductCode","getForRepo","emojis","enterpriseAdmin","disableSelectedOrganizationGithubActionsEnterprise","enableSelectedOrganizationGithubActionsEnterprise","getAllowedActionsEnterprise","getGithubActionsPermissionsEnterprise","listSelectedOrganizationsEnabledGithubActionsEnterprise","setAllowedActionsEnterprise","setGithubActionsPermissionsEnterprise","setSelectedOrganizationsEnabledGithubActionsEnterprise","gists","checkIsStarred","createComment","delete","deleteComment","fork","getComment","getRevision","list","listComments","listCommits","listForUser","listForks","listPublic","listStarred","star","unstar","updateComment","git","createBlob","createCommit","createRef","createTag","createTree","deleteRef","getBlob","getCommit","getRef","getTag","getTree","listMatchingRefs","updateRef","gitignore","getAllTemplates","getTemplate","interactions","getRestrictionsForAuthenticatedUser","getRestrictionsForOrg","getRestrictionsForRepo","getRestrictionsForYourPublicRepos","removeRestrictionsForAuthenticatedUser","removeRestrictionsForOrg","removeRestrictionsForRepo","removeRestrictionsForYourPublicRepos","setRestrictionsForAuthenticatedUser","setRestrictionsForOrg","setRestrictionsForRepo","setRestrictionsForYourPublicRepos","issues","addAssignees","addLabels","checkUserCanBeAssigned","createLabel","createMilestone","deleteLabel","deleteMilestone","getEvent","getLabel","getMilestone","listAssignees","listCommentsForRepo","listEvents","listEventsForRepo","listEventsForTimeline","listForAuthenticatedUser","listForOrg","listForRepo","listLabelsForMilestone","listLabelsForRepo","listLabelsOnIssue","listMilestones","lock","removeAllLabels","removeAssignees","removeLabel","setLabels","unlock","updateLabel","updateMilestone","licenses","getAllCommonlyUsed","markdown","render","r
enderRaw","headers","meta","getOctocat","getZen","root","migrations","cancelImport","deleteArchiveForAuthenticatedUser","deleteArchiveForOrg","downloadArchiveForOrg","getArchiveForAuthenticatedUser","getCommitAuthors","getImportStatus","getLargeFiles","getStatusForAuthenticatedUser","getStatusForOrg","listReposForOrg","listReposForUser","mapCommitAuthor","setLfsPreference","startForAuthenticatedUser","startForOrg","startImport","unlockRepoForAuthenticatedUser","unlockRepoForOrg","updateImport","orgs","blockUser","cancelInvitation","checkBlockedUser","checkMembershipForUser","checkPublicMembershipForUser","convertMemberToOutsideCollaborator","createInvitation","createWebhook","deleteWebhook","getMembershipForAuthenticatedUser","getMembershipForUser","getWebhook","getWebhookConfigForOrg","listAppInstallations","listBlockedUsers","listFailedInvitations","listInvitationTeams","listMembers","listMembershipsForAuthenticatedUser","listOutsideCollaborators","listPendingInvitations","listPublicMembers","listWebhooks","pingWebhook","removeMember","removeMembershipForUser","removeOutsideCollaborator","removePublicMembershipForAuthenticatedUser","setMembershipForUser","setPublicMembershipForAuthenticatedUser","unblockUser","updateMembershipForAuthenticatedUser","updateWebhook","updateWebhookConfigForOrg","packages","deletePackageForAuthenticatedUser","deletePackageForOrg","deletePackageVersionForAuthenticatedUser","deletePackageVersionForOrg","getAllPackageVersionsForAPackageOwnedByAnOrg","getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser","getAllPackageVersionsForPackageOwnedByAuthenticatedUser","getAllPackageVersionsForPackageOwnedByOrg","getAllPackageVersionsForPackageOwnedByUser","getPackageForAuthenticatedUser","getPackageForOrganization","getPackageForUser","getPackageVersionForAuthenticatedUser","getPackageVersionForOrganization","getPackageVersionForUser","restorePackageForAuthenticatedUser","restorePackageForOrg","restorePackageVersionForAuthenticatedUser","restorePackageVersionForOrg","projects","addCollaborator","createCard","createColumn","createForAuthenticatedUser","createForOrg","createForRepo","deleteCard","deleteColumn","getCard","getColumn","getPermissionForUser","listCards","listCollaborators","listColumns","moveCard","moveColumn","removeCollaborator","updateCard","updateColumn","pulls","checkIfMerged","createReplyForReviewComment","createReview","createReviewComment","deletePendingReview","deleteReviewComment","dismissReview","getReview","getReviewComment","listCommentsForReview","listFiles","listRequestedReviewers","listReviewComments","listReviewCommentsForRepo","listReviews","merge","removeRequestedReviewers","requestReviewers","submitReview","updateBranch","updateReview","updateReviewComment","rateLimit","reactions","createForCommitComment","createForIssue","createForIssueComment","createForPullRequestReviewComment","createForTeamDiscussionCommentInOrg","createForTeamDiscussionInOrg","deleteForCommitComment","deleteForIssue","deleteForIssueComment","deleteForPullRequestComment","deleteForTeamDiscussion","deleteForTeamDiscussionComment","deleteLegacy","deprecated","listForCommitComment","listForIssue","listForIssueComment","listForPullRequestReviewComment","listForTeamDiscussionCommentInOrg","listForTeamDiscussionInOrg","repos","acceptInvitation","addAppAccessRestrictions","mapToData","addStatusCheckContexts","addTeamAccessRestrictions","addUserAccessRestrictions","checkCollaborator","checkVulnerabilityAlerts","compareCommits","createCommitComment","createCommitSignatur
eProtection","createCommitStatus","createDeployKey","createDeployment","createDeploymentStatus","createDispatchEvent","createFork","createInOrg","createOrUpdateEnvironment","createOrUpdateFileContents","createPagesSite","createRelease","createUsingTemplate","declineInvitation","deleteAccessRestrictions","deleteAdminBranchProtection","deleteAnEnvironment","deleteBranchProtection","deleteCommitComment","deleteCommitSignatureProtection","deleteDeployKey","deleteDeployment","deleteFile","deleteInvitation","deletePagesSite","deletePullRequestReviewProtection","deleteRelease","deleteReleaseAsset","disableAutomatedSecurityFixes","disableVulnerabilityAlerts","downloadArchive","downloadTarballArchive","downloadZipballArchive","enableAutomatedSecurityFixes","enableVulnerabilityAlerts","getAccessRestrictions","getAdminBranchProtection","getAllEnvironments","getAllStatusCheckContexts","getAllTopics","getAppsWithAccessToProtectedBranch","getBranch","getBranchProtection","getClones","getCodeFrequencyStats","getCollaboratorPermissionLevel","getCombinedStatusForRef","getCommitActivityStats","getCommitComment","getCommitSignatureProtection","getCommunityProfileMetrics","getContent","getContributorsStats","getDeployKey","getDeployment","getDeploymentStatus","getEnvironment","getLatestPagesBuild","getLatestRelease","getPages","getPagesBuild","getParticipationStats","getPullRequestReviewProtection","getPunchCardStats","getReadme","getReadmeInDirectory","getRelease","getReleaseAsset","getReleaseByTag","getStatusChecksProtection","getTeamsWithAccessToProtectedBranch","getTopPaths","getTopReferrers","getUsersWithAccessToProtectedBranch","getViews","getWebhookConfigForRepo","listBranches","listBranchesForHeadCommit","listCommentsForCommit","listCommitCommentsForRepo","listCommitStatusesForRef","listContributors","listDeployKeys","listDeploymentStatuses","listDeployments","listInvitations","listInvitationsForAuthenticatedUser","listLanguages","listPagesBuilds","listPullRequestsAssociatedWithCommit","listReleaseAssets","listReleases","listTags","listTeams","removeAppAccessRestrictions","removeStatusCheckContexts","removeStatusCheckProtection","removeTeamAccessRestrictions","removeUserAccessRestrictions","renameBranch","replaceAllTopics","requestPagesBuild","setAdminBranchProtection","setAppAccessRestrictions","setStatusCheckContexts","setTeamAccessRestrictions","setUserAccessRestrictions","testPushWebhook","transfer","updateBranchProtection","updateCommitComment","updateInformationAboutPagesSite","updateInvitation","updatePullRequestReviewProtection","updateRelease","updateReleaseAsset","updateStatusCheckPotection","updateStatusCheckProtection","updateWebhookConfigForRepo","uploadReleaseAsset","baseUrl","search","code","commits","issuesAndPullRequests","labels","topics","users","secretScanning","teams","addOrUpdateMembershipForUserInOrg","addOrUpdateProjectPermissionsInOrg","addOrUpdateRepoPermissionsInOrg","checkPermissionsForProjectInOrg","checkPermissionsForRepoInOrg","createDiscussionCommentInOrg","createDiscussionInOrg","deleteDiscussionCommentInOrg","deleteDiscussionInOrg","deleteInOrg","getByName","getDiscussionCommentInOrg","getDiscussionInOrg","getMembershipForUserInOrg","listChildInOrg","listDiscussionCommentsInOrg","listDiscussionsInOrg","listMembersInOrg","listPendingInvitationsInOrg","listProjectsInOrg","listReposInOrg","removeMembershipForUserInOrg","removeProjectInOrg","removeRepoInOrg","updateDiscussionCommentInOrg","updateDiscussionInOrg","updateInOrg","addEmailForAuthenticated","block","checkBlocke
d","checkFollowingForUser","checkPersonIsFollowedByAuthenticated","createGpgKeyForAuthenticated","createPublicSshKeyForAuthenticated","deleteEmailForAuthenticated","deleteGpgKeyForAuthenticated","deletePublicSshKeyForAuthenticated","follow","getByUsername","getContextForUser","getGpgKeyForAuthenticated","getPublicSshKeyForAuthenticated","listBlockedByAuthenticated","listEmailsForAuthenticated","listFollowedByAuthenticated","listFollowersForAuthenticatedUser","listFollowersForUser","listFollowingForUser","listGpgKeysForAuthenticated","listGpgKeysForUser","listPublicEmailsForAuthenticated","listPublicKeysForUser","listPublicSshKeysForAuthenticated","setPrimaryEmailVisibilityForAuthenticated","unblock","unfollow","updateAuthenticated","VERSION","endpointsToMethods","octokit","endpointsMap","newMethods","scope","endpoints","Object","entries","methodName","endpoint","route","defaults","decorations","method","url","split","endpointDefaults","assign","scopeMethods","decorate","request","requestWithDefaults","withDecorations","args","options","data","undefined","newScope","newMethodName","log","warn","name","alias","restEndpointMethods","api","ENDPOINTS","rest"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,MAAMA,SAAS,GAAG;AACdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,0BAA0B,EAAE,CACxB,4EADwB,CADvB;AAILC,IAAAA,iBAAiB,EAAE,CACf,yDADe,CAJd;AAOLC,IAAAA,+BAA+B,EAAE,CAC7B,yFAD6B,CAP5B;AAULC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CAVpB;AAWLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CAXrB;AAcLC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CAd1B;AAiBLC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CAjB3B;AAoBLC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CApBpB;AAqBLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CArBrB;AAwBLC,IAAAA,sBAAsB,EAAE,CACpB,uEADoB,CAxBnB;AA2BLC,IAAAA,cAAc,EAAE,CACZ,8DADY,CA3BX;AA8BLC,IAAAA,uBAAuB,EAAE,CACrB,4FADqB,CA9BpB;AAiCLC,IAAAA,eAAe,EAAE,CAAC,kDAAD,CAjCZ;AAkCLC,IAAAA,gBAAgB,EAAE,CACd,4DADc,CAlCb;AAqCLC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CArC1B;AAwCLC,IAAAA,8BAA8B,EAAE,CAC5B,0DAD4B,CAxC3B;AA2CLC,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CA3Cd;AA4CLC,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CA5ClB;AA+CLC,IAAAA,kDAAkD,EAAE,CAChD,qEADgD,CA/C/C;AAkDLC,IAAAA,eAAe,EAAE,CACb,mEADa,CAlDZ;AAqDLC,IAAAA,gBAAgB,EAAE,CACd,4EADc,CArDb;AAwDLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CAxD1B;AA2DLC,IAAAA,uBAAuB,EAAE,CACrB,sDADqB,CA3DpB;AA8DLC,IAAAA,iDAAiD,EAAE,CAC/C,kEAD+C,CA9D9C;AAiELC,IAAAA,cAAc,EAAE,CACZ,kEADY,CAjEX;AAoELC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CApE1B;AAuELC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CAvExB;AA0ELC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CA1ER;AA2ELC,IAAAA,uBAAuB,EAAE,CACrB,sFADqB,CA3EpB;AA8ELC,IAAAA,oBAAoB,EAAE,CAClB,yFADkB,CA9EjB;AAiFLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CAjFpC;AAoFLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CApFlC;AAuFLC,IAAAA,oBAAoB,EAAE,CAAC,iDAAD,CAvFjB;AAwFLC,IAAAA,eAAe,EAAE,CAAC,4CAAD,CAxFZ;AAyFLC,IAAAA,YAAY,EAAE,CAAC,+CAAD,CAzFT;AA0FLC,IAAAA,2BAA2B,EAAE,CACzB,qEADyB,CA1FxB;AA6FLC,IAAAA,kBAAkB,EAAE,CAChB,+CADgB,EAEhB,EAFgB,EAGhB;AAAEC,MAAAA,OAAO,EAAE,CAAC,SAAD,EAAY,uCAAZ;AAAX,KAHgB,CA7Ff;AAkGLC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CAlGb;AAmGLC,IAAAA,aAAa,EAAE,CAAC,yDAAD,CAnGV;AAoGLC,IAAAA,gBAAgB,EAAE,CACd,2DADc,CApGb;AAuGLC,IAAAA,yBAAyB,EAAE,CAAC,6CAAD,CAvGtB;AAwGLC,IAAAA,0BAA0B,EAAE,CACxB,uDADwB,CAxGvB;AA2GLC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CA3GR;AA4GLC,IAAAA,cAAc,EAAE,CAAC,iDAAD,CA5GX;AA6GLC,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CA7GhB;AAgHLC,IAAAA,gBAAgB,EAAE,CACd,kEADc,CAhHb;AAmHLC,IAAAA,oBAAoB,EAAE,CAAC,6CAAD,CAnHjB;AAoHLC,IAAAA,sBAAsB,EAAE,CACpB,2EADoB,CApHnB;AAuHLC,IAAAA,sBAAsB,EAAE,CACpB,sDADoB,CAvHnB;AA0HLC,IAAAA,cAAc,EAAE,CAAC,iCAAD,CA1HX;AA2HLC,IAAAA,eAAe,EAAE,CAAC,2CAAD,CA3HZ;AA4HLC,IAAA
A,iBAAiB,EAAE,CAAC,6CAAD,CA5Hd;AA6HLC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CA7HzB;AA8HLC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CA9H1B;AAiILC,IAAAA,6BAA6B,EAAE,CAC3B,4DAD2B,CAjI1B;AAoILC,IAAAA,wDAAwD,EAAE,CACtD,kDADsD,CApIrD;AAuILC,IAAAA,2BAA2B,EAAE,CAAC,iCAAD,CAvIxB;AAwILC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAxIzB;AAyILC,IAAAA,wBAAwB,EAAE,CACtB,2DADsB,CAzIrB;AA4ILC,IAAAA,gBAAgB,EAAE,CACd,gEADc,CA5Ib;AA+ILC,IAAAA,uBAAuB,EAAE,CAAC,wCAAD,CA/IpB;AAgJLC,IAAAA,aAAa,EAAE,CAAC,wDAAD,CAhJV;AAiJLC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,CAjJ5B;AAoJLC,IAAAA,8BAA8B,EAAE,CAC5B,sEAD4B,CApJ3B;AAuJLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CAvJ1B;AA0JLC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CA1JxB;AA6JLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CA7JpC;AAgKLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CAhKlC;AAmKLC,IAAAA,4BAA4B,EAAE,CAC1B,4DAD0B,CAnKzB;AAsKLC,IAAAA,uDAAuD,EAAE,CACrD,kDADqD;AAtKpD,GADK;AA2KdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,qCAAqC,EAAE,CAAC,kCAAD,CADjC;AAENC,IAAAA,sBAAsB,EAAE,CAAC,2CAAD,CAFlB;AAGNC,IAAAA,wBAAwB,EAAE,CACtB,wDADsB,CAHpB;AAMNC,IAAAA,QAAQ,EAAE,CAAC,YAAD,CANJ;AAONC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAPf;AAQNC,IAAAA,SAAS,EAAE,CAAC,wCAAD,CARL;AASNC,IAAAA,yCAAyC,EAAE,CACvC,qDADuC,CATrC;AAYNC,IAAAA,8BAA8B,EAAE,CAAC,8BAAD,CAZ1B;AAaNC,IAAAA,qCAAqC,EAAE,CAAC,oBAAD,CAbjC;AAcNC,IAAAA,iCAAiC,EAAE,CAC/B,yCAD+B,CAd7B;AAiBNC,IAAAA,gBAAgB,EAAE,CAAC,aAAD,CAjBZ;AAkBNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD,CAlB1B;AAmBNC,IAAAA,uBAAuB,EAAE,CAAC,qCAAD,CAnBnB;AAoBNC,IAAAA,mBAAmB,EAAE,CAAC,wBAAD,CApBf;AAqBNC,IAAAA,yBAAyB,EAAE,CAAC,uCAAD,CArBrB;AAsBNC,IAAAA,+BAA+B,EAAE,CAC7B,8CAD6B,CAtB3B;AAyBNC,IAAAA,cAAc,EAAE,CAAC,kCAAD,CAzBV;AA0BNC,IAAAA,yCAAyC,EAAE,CACvC,yCADuC,CA1BrC;AA6BNC,IAAAA,mCAAmC,EAAE,CAAC,mBAAD,CA7B/B;AA8BNC,IAAAA,sBAAsB,EAAE,CAAC,+BAAD,CA9BlB;AA+BNC,IAAAA,sBAAsB,EAAE,CAAC,qCAAD,CA/BlB;AAgCNC,IAAAA,qBAAqB,EAAE,CAAC,sCAAD,CAhCjB;AAiCNC,IAAAA,oCAAoC,EAAE,CAAC,yBAAD,CAjChC;AAkCNC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CAlCf;AAmCNC,IAAAA,uBAAuB,EAAE,CAAC,oBAAD,CAnCnB;AAoCNC,IAAAA,2BAA2B,EAAE,CAAC,yCAAD,CApCvB;AAqCNC,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CArCZ;AAsCNC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAtCf;AAuCNC,IAAAA,qBAAqB,EAAE,CACnB,qDADmB,CAvCjB;AA0CNC,IAAAA,4BAA4B,EAAE,CAAC,kCAAD,CA1CxB;AA2CNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD;AA3C1B,GA3KI;AAwNdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,qBAAqB,EAAE,CACnB,wEADmB,CADrB;AAIFC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CAJV;AAKFC,IAAAA,uBAAuB,EAAE,CACrB,6DADqB,EAErB;AAAEC,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFqB,CALvB;AASFC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CATlB;AAUFC,IAAAA,6BAA6B,EAAE,CAC3B,yDAD2B,CAV7B;AAaFC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAbnB;AAcFC,IAAAA,kBAAkB,EAAE,CAAC,6CAAD,CAdlB;AAeFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CAfX;AAgBFC,IAAAA,gBAAgB,EAAE,CAAC,UAAD,CAhBhB;AAiBFC,IAAAA,SAAS,EAAE,CAAC,sBAAD,CAjBT;AAkBFC,IAAAA,eAAe,EAAE,CAAC,0CAAD,CAlBf;AAmBFC,IAAAA,kBAAkB,EAAE,CAAC,8BAAD,CAnBlB;AAoBFC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CApBnB;AAqBFC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CArB7B;AAwBFC,IAAAA,oCAAoC,EAAE,CAClC,wDADkC,CAxBpC;AA2BFC,IAAAA,mBAAmB,EAAE,CAAC,oCAAD,CA3BnB;AA4BFC,IAAAA,sBAAsB,EAAE,CAAC,sBAAD,CA5BtB;AA6BFC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CA7BnB;AA8BFC,IAAAA,0BAA0B,EAAE,CACxB,2DADwB,CA9B1B;AAiCFC,IAAAA,yCAAyC,EAAE,CACvC,wDADuC,CAjCzC;AAoCFC,IAAAA,iBAAiB,EAAE,CAAC,wBAAD,CApCjB;AAqCFC,IAAAA,qCAAqC,EAAE,CAAC,yBAAD,CArCrC;AAsCFC,IAAAA,SAAS,EAAE,CAAC,gCAAD,CAtCT;AAuCFC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAvChB;AAwCFC,IAAAA,iCAAiC,EAAE,CAAC,gCAAD,CAxCjC;AAyCFC,IAAAA,qCAAqC,EAAE,CAAC,iCAAD,CAzCrC;AA0CFC,IAAAA,4CAA4C,EAAE,CAC1C,yCAD0C,CA1C5C;AA6CFC,IAAAA,0BAA0B,EAAE,CACxB,2EADwB,CA7C1B;AAgDFC,IAAAA,UAAU,EAAE,CAAC,uCAAD,CAhDV;AAiDFC,IAAAA,6BAA6B,EAAE,CAAC,4BAAD,CAjD7B;AAkDFC,IAAAA,UAA
U,EAAE,CAAC,6CAAD,CAlDV;AAmDFC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CAnDnB;AAoDFC,IAAAA,qBAAqB,EAAE,CACnB,uDADmB,CApDrB;AAuDFC,IAAAA,yBAAyB,EAAE,CAAC,wBAAD;AAvDzB,GAxNQ;AAiRdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,0BAA0B,EAAE,CAAC,0CAAD,CADvB;AAELC,IAAAA,2BAA2B,EAAE,CACzB,gDADyB,CAFxB;AAKLC,IAAAA,2BAA2B,EAAE,CAAC,2CAAD,CALxB;AAMLC,IAAAA,4BAA4B,EAAE,CAC1B,iDAD0B,CANzB;AASLC,IAAAA,0BAA0B,EAAE,CACxB,iDADwB,CATvB;AAYLC,IAAAA,2BAA2B,EAAE,CACzB,uDADyB;AAZxB,GAjRK;AAiSdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,MAAM,EAAE,CAAC,uCAAD,CADJ;AAEJC,IAAAA,WAAW,EAAE,CAAC,yCAAD,CAFT;AAGJC,IAAAA,GAAG,EAAE,CAAC,qDAAD,CAHD;AAIJC,IAAAA,QAAQ,EAAE,CAAC,yDAAD,CAJN;AAKJC,IAAAA,eAAe,EAAE,CACb,iEADa,CALb;AAQJC,IAAAA,UAAU,EAAE,CAAC,oDAAD,CARR;AASJC,IAAAA,YAAY,EAAE,CACV,oEADU,CATV;AAYJC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CAZd;AAaJC,IAAAA,cAAc,EAAE,CACZ,oEADY,CAbZ;AAgBJC,IAAAA,oBAAoB,EAAE,CAClB,sDADkB,CAhBlB;AAmBJC,IAAAA,MAAM,EAAE,CAAC,uDAAD;AAnBJ,GAjSM;AAsTdC,EAAAA,YAAY,EAAE;AACVC,IAAAA,cAAc,EAAE,CACZ,oFADY,CADN;AAIVC,IAAAA,QAAQ,EAAE,CACN,+DADM,EAEN,EAFM,EAGN;AAAEC,MAAAA,iBAAiB,EAAE;AAAEC,QAAAA,QAAQ,EAAE;AAAZ;AAArB,KAHM,CAJA;AASVC,IAAAA,WAAW,EAAE,CACT,gEADS,CATH;AAYVC,IAAAA,QAAQ,EAAE,CAAC,2DAAD,CAZA;AAaVC,IAAAA,iBAAiB,EAAE,CAAC,gDAAD,CAbT;AAcVC,IAAAA,mBAAmB,EAAE,CACjB,yEADiB,CAdX;AAiBVC,IAAAA,kBAAkB,EAAE,CAAC,kDAAD,CAjBV;AAkBVC,IAAAA,WAAW,EAAE,CACT,iEADS,CAlBH;AAqBVC,IAAAA,WAAW,EAAE,CAAC,iDAAD;AArBH,GAtTA;AA6UdC,EAAAA,cAAc,EAAE;AACZC,IAAAA,oBAAoB,EAAE,CAClB,uBADkB,EAElB;AAAEjE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFkB,CADV;AAKZiE,IAAAA,cAAc,EAAE,CACZ,6BADY,EAEZ;AAAElE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CALJ;AASZkE,IAAAA,UAAU,EAAE,CACR,qDADQ,EAER;AAAEnE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFQ;AATA,GA7UF;AA2VdmE,EAAAA,MAAM,EAAE;AAAEzB,IAAAA,GAAG,EAAE,CAAC,aAAD;AAAP,GA3VM;AA4Vd0B,EAAAA,eAAe,EAAE;AACbC,IAAAA,kDAAkD,EAAE,CAChD,6EADgD,CADvC;AAIbC,IAAAA,iDAAiD,EAAE,CAC/C,0EAD+C,CAJtC;AAObC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CAPhB;AAUbC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CAV1B;AAabC,IAAAA,uDAAuD,EAAE,CACrD,iEADqD,CAb5C;AAgBbC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CAhBhB;AAmBbC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CAnB1B;AAsBbC,IAAAA,sDAAsD,EAAE,CACpD,iEADoD;AAtB3C,GA5VH;AAsXdC,EAAAA,KAAK,EAAE;AACHC,IAAAA,cAAc,EAAE,CAAC,2BAAD,CADb;AAEHtC,IAAAA,MAAM,EAAE,CAAC,aAAD,CAFL;AAGHuC,IAAAA,aAAa,EAAE,CAAC,gCAAD,CAHZ;AAIHC,IAAAA,MAAM,EAAE,CAAC,yBAAD,CAJL;AAKHC,IAAAA,aAAa,EAAE,CAAC,+CAAD,CALZ;AAMHC,IAAAA,IAAI,EAAE,CAAC,6BAAD,CANH;AAOHxC,IAAAA,GAAG,EAAE,CAAC,sBAAD,CAPF;AAQHyC,IAAAA,UAAU,EAAE,CAAC,4CAAD,CART;AASHC,IAAAA,WAAW,EAAE,CAAC,4BAAD,CATV;AAUHC,IAAAA,IAAI,EAAE,CAAC,YAAD,CAVH;AAWHC,IAAAA,YAAY,EAAE,CAAC,+BAAD,CAXX;AAYHC,IAAAA,WAAW,EAAE,CAAC,8BAAD,CAZV;AAaHC,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAbV;AAcHC,IAAAA,SAAS,EAAE,CAAC,4BAAD,CAdR;AAeHC,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAfT;AAgBHC,IAAAA,WAAW,EAAE,CAAC,oBAAD,CAhBV;AAiBHC,IAAAA,IAAI,EAAE,CAAC,2BAAD,CAjBH;AAkBHC,IAAAA,MAAM,EAAE,CAAC,8BAAD,CAlBL;AAmBH3C,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAnBL;AAoBH4C,IAAAA,aAAa,EAAE,CAAC,8CAAD;AApBZ,GAtXO;AA4YdC,EAAAA,GAAG,EAAE;AACDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CADX;AAEDC,IAAAA,YAAY,EAAE,CAAC,wCAAD,CAFb;AAGDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAHV;AAIDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAJV;AAKDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CALX;AAMDC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CANV;AAODC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAPR;AAQDC,IAAAA,SAAS,EAAE,CAAC,oDAAD,CARV;AASDC,IAAAA,MAAM,EAAE,CAAC,yCAAD,CATP;AAUDC,IAAAA,MAAM,EAAE,CAAC,8CAAD,CAVP;AAWDC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAXR;AAYDC,IAAAA,gBAAgB,EAAE,CAAC,mDAAD,CAZjB;AAaDC,IAAAA,SAAS,EAAE,CAAC,4CAAD;AAbV,GA5YS;AA2ZdC,EAAAA,SAAS,EAAE;AACPC,IAAAA,eAA
e,EAAE,CAAC,0BAAD,CADV;AAEPC,IAAAA,WAAW,EAAE,CAAC,iCAAD;AAFN,GA3ZG;AA+ZdC,EAAAA,YAAY,EAAE;AACVC,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAD3B;AAEVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CAFb;AAGVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CAHd;AAIVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAE3L,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B,CAJzB;AASV4L,IAAAA,sCAAsC,EAAE,CAAC,iCAAD,CAT9B;AAUVC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CAVhB;AAWVC,IAAAA,yBAAyB,EAAE,CACvB,iDADuB,CAXjB;AAcVC,IAAAA,oCAAoC,EAAE,CAClC,iCADkC,EAElC,EAFkC,EAGlC;AAAE/L,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,wCAAjB;AAAX,KAHkC,CAd5B;AAmBVgM,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAnB3B;AAoBVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CApBb;AAqBVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CArBd;AAsBVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAEnM,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B;AAtBzB,GA/ZA;AA2bdoM,EAAAA,MAAM,EAAE;AACJC,IAAAA,YAAY,EAAE,CACV,4DADU,CADV;AAIJC,IAAAA,SAAS,EAAE,CAAC,yDAAD,CAJP;AAKJC,IAAAA,sBAAsB,EAAE,CAAC,gDAAD,CALpB;AAMJxF,IAAAA,MAAM,EAAE,CAAC,mCAAD,CANJ;AAOJuC,IAAAA,aAAa,EAAE,CACX,2DADW,CAPX;AAUJkD,IAAAA,WAAW,EAAE,CAAC,mCAAD,CAVT;AAWJC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CAXb;AAYJjD,IAAAA,aAAa,EAAE,CACX,2DADW,CAZX;AAeJkD,IAAAA,WAAW,EAAE,CAAC,4CAAD,CAfT;AAgBJC,IAAAA,eAAe,EAAE,CACb,4DADa,CAhBb;AAmBJ1F,IAAAA,GAAG,EAAE,CAAC,iDAAD,CAnBD;AAoBJyC,IAAAA,UAAU,EAAE,CAAC,wDAAD,CApBR;AAqBJkD,IAAAA,QAAQ,EAAE,CAAC,oDAAD,CArBN;AAsBJC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CAtBN;AAuBJC,IAAAA,YAAY,EAAE,CAAC,yDAAD,CAvBV;AAwBJlD,IAAAA,IAAI,EAAE,CAAC,aAAD,CAxBF;AAyBJmD,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAzBX;AA0BJlD,IAAAA,YAAY,EAAE,CAAC,0DAAD,CA1BV;AA2BJmD,IAAAA,mBAAmB,EAAE,CAAC,2CAAD,CA3BjB;AA4BJC,IAAAA,UAAU,EAAE,CAAC,wDAAD,CA5BR;AA6BJC,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA7Bf;AA8BJC,IAAAA,qBAAqB,EAAE,CACnB,0DADmB,EAEnB;AAAE7I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,aAAD;AAAZ;AAAb,KAFmB,CA9BnB;AAkCJ6I,IAAAA,wBAAwB,EAAE,CAAC,kBAAD,CAlCtB;AAmCJC,IAAAA,UAAU,EAAE,CAAC,wBAAD,CAnCR;AAoCJC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CApCT;AAqCJC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CArCpB;AAwCJC,IAAAA,iBAAiB,EAAE,CAAC,kCAAD,CAxCf;AAyCJC,IAAAA,iBAAiB,EAAE,CACf,wDADe,CAzCf;AA4CJC,IAAAA,cAAc,EAAE,CAAC,sCAAD,CA5CZ;AA6CJC,IAAAA,IAAI,EAAE,CAAC,sDAAD,CA7CF;AA8CJC,IAAAA,eAAe,EAAE,CACb,2DADa,CA9Cb;AAiDJC,IAAAA,eAAe,EAAE,CACb,8DADa,CAjDb;AAoDJC,IAAAA,WAAW,EAAE,CACT,kEADS,CApDT;AAuDJC,IAAAA,SAAS,EAAE,CAAC,wDAAD,CAvDP;AAwDJC,IAAAA,MAAM,EAAE,CAAC,yDAAD,CAxDJ;AAyDJvG,IAAAA,MAAM,EAAE,CAAC,mDAAD,CAzDJ;AA0DJ4C,IAAAA,aAAa,EAAE,CAAC,0DAAD,CA1DX;AA2DJ4D,IAAAA,WAAW,EAAE,CAAC,2CAAD,CA3DT;AA4DJC,IAAAA,eAAe,EAAE,CACb,2DADa;AA5Db,GA3bM;AA2fdC,EAAAA,QAAQ,EAAE;AACNlH,IAAAA,GAAG,EAAE,CAAC,yBAAD,CADC;AAENmH,IAAAA,kBAAkB,EAAE,CAAC,eAAD,CAFd;AAGN3F,IAAAA,UAAU,EAAE,CAAC,mCAAD;AAHN,GA3fI;AAggBd4F,EAAAA,QAAQ,EAAE;AACNC,IAAAA,MAAM,EAAE,CAAC,gBAAD,CADF;AAENC,IAAAA,SAAS,EAAE,CACP,oBADO,EAEP;AAAEC,MAAAA,OAAO,EAAE;AAAE,wBAAgB;AAAlB;AAAX,KAFO;AAFL,GAhgBI;AAugBdC,EAAAA,IAAI,EAAE;AACFxH,IAAAA,GAAG,EAAE,CAAC,WAAD,CADH;AAEFyH,IAAAA,UAAU,EAAE,CAAC,cAAD,CAFV;AAGFC,IAAAA,MAAM,EAAE,CAAC,UAAD,CAHN;AAIFC,IAAAA,IAAI,EAAE,CAAC,OAAD;AAJJ,GAvgBQ;AA6gBdC,EAAAA,UAAU,EAAE;AACRC,IAAAA,YAAY,EAAE,CAAC,qCAAD,CADN;AAERC,IAAAA,iCAAiC,EAAE,CAC/B,gDAD+B,EAE/B;AAAEzK,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF+B,CAF3B;AAMRyK,IAAAA,mBAAmB,EAAE,CACjB,sDADiB,EAEjB;AAAE1K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFiB,CANb;AAUR0K,IAAAA,qBAAqB,EAAE,CACnB,mDADmB,EAEnB;AAAE3K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFmB,CAVf;AAcR2K,IAAAA,8BAA8B,EAAE,CAC5B,6CAD4B,EAE5B;AAAE5K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF4B,CAdxB;AAkBR
4K,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CAlBV;AAmBRC,IAAAA,eAAe,EAAE,CAAC,kCAAD,CAnBT;AAoBRC,IAAAA,aAAa,EAAE,CAAC,8CAAD,CApBP;AAqBRC,IAAAA,6BAA6B,EAAE,CAC3B,qCAD2B,EAE3B;AAAEhL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF2B,CArBvB;AAyBRgL,IAAAA,eAAe,EAAE,CACb,2CADa,EAEb;AAAEjL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFa,CAzBT;AA6BR6I,IAAAA,wBAAwB,EAAE,CACtB,sBADsB,EAEtB;AAAE9I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFsB,CA7BlB;AAiCR8I,IAAAA,UAAU,EAAE,CACR,4BADQ,EAER;AAAE/I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFQ,CAjCJ;AAqCRiL,IAAAA,eAAe,EAAE,CACb,wDADa,EAEb;AAAElL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFa,CArCT;AAyCRkL,IAAAA,gBAAgB,EAAE,CACd,kDADc,EAEd;AAAEnL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFc,CAzCV;AA6CRmL,IAAAA,eAAe,EAAE,CAAC,wDAAD,CA7CT;AA8CRC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CA9CV;AA+CRC,IAAAA,yBAAyB,EAAE,CAAC,uBAAD,CA/CnB;AAgDRC,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAhDL;AAiDRC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CAjDL;AAkDRC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,EAE5B;AAAEzL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF4B,CAlDxB;AAsDRyL,IAAAA,gBAAgB,EAAE,CACd,qEADc,EAEd;AAAE1L,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFc,CAtDV;AA0DR0L,IAAAA,YAAY,EAAE,CAAC,oCAAD;AA1DN,GA7gBE;AAykBdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CADT;AAEFC,IAAAA,gBAAgB,EAAE,CAAC,gDAAD,CAFhB;AAGFC,IAAAA,gBAAgB,EAAE,CAAC,mCAAD,CAHhB;AAIFC,IAAAA,sBAAsB,EAAE,CAAC,oCAAD,CAJtB;AAKFC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAL5B;AAMFC,IAAAA,kCAAkC,EAAE,CAChC,kDADgC,CANlC;AASFC,IAAAA,gBAAgB,EAAE,CAAC,8BAAD,CAThB;AAUFC,IAAAA,aAAa,EAAE,CAAC,wBAAD,CAVb;AAWFC,IAAAA,aAAa,EAAE,CAAC,oCAAD,CAXb;AAYF1J,IAAAA,GAAG,EAAE,CAAC,iBAAD,CAZH;AAaF2J,IAAAA,iCAAiC,EAAE,CAAC,kCAAD,CAbjC;AAcFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAdpB;AAeFC,IAAAA,UAAU,EAAE,CAAC,iCAAD,CAfV;AAgBFC,IAAAA,sBAAsB,EAAE,CAAC,wCAAD,CAhBtB;AAiBFnH,IAAAA,IAAI,EAAE,CAAC,oBAAD,CAjBJ;AAkBFoH,IAAAA,oBAAoB,EAAE,CAAC,+BAAD,CAlBpB;AAmBFC,IAAAA,gBAAgB,EAAE,CAAC,wBAAD,CAnBhB;AAoBFC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CApBrB;AAqBF9D,IAAAA,wBAAwB,EAAE,CAAC,gBAAD,CArBxB;AAsBFrD,IAAAA,WAAW,EAAE,CAAC,4BAAD,CAtBX;AAuBFoH,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CAvBnB;AAwBFC,IAAAA,WAAW,EAAE,CAAC,yBAAD,CAxBX;AAyBFC,IAAAA,mCAAmC,EAAE,CAAC,4BAAD,CAzBnC;AA0BFC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CA1BxB;AA2BFC,IAAAA,sBAAsB,EAAE,CAAC,6BAAD,CA3BtB;AA4BFC,IAAAA,iBAAiB,EAAE,CAAC,gCAAD,CA5BjB;AA6BFC,IAAAA,YAAY,EAAE,CAAC,uBAAD,CA7BZ;AA8BFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CA9BX;AA+BFC,IAAAA,YAAY,EAAE,CAAC,uCAAD,CA/BZ;AAgCFC,IAAAA,uBAAuB,EAAE,CAAC,2CAAD,CAhCvB;AAiCFC,IAAAA,yBAAyB,EAAE,CACvB,qDADuB,CAjCzB;AAoCFC,IAAAA,0CAA0C,EAAE,CACxC,8CADwC,CApC1C;AAuCFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAvCpB;AAwCFC,IAAAA,uCAAuC,EAAE,CACrC,2CADqC,CAxCvC;AA2CFC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CA3CX;AA4CFxK,IAAAA,MAAM,EAAE,CAAC,mBAAD,CA5CN;AA6CFyK,IAAAA,oCAAoC,EAAE,CAClC,oCADkC,CA7CpC;AAgDFC,IAAAA,aAAa,EAAE,CAAC,mCAAD,CAhDb;AAiDFC,IAAAA,yBAAyB,EAAE,CAAC,0CAAD;AAjDzB,GAzkBQ;AA4nBdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,iCAAiC,EAAE,CAC/B,qDAD+B,CAD7B;AAINC,IAAAA,mBAAmB,EAAE,CACjB,2DADiB,CAJf;AAONC,IAAAA,wCAAwC,EAAE,CACtC,mFADsC,CAPpC;AAUNC,IAAAA,0BAA0B,EAAE,CACxB,yFADwB,CAVtB;AAaNC,IAAAA,4CAA4C,EAAE,CAC1C,iEAD0C,EAE1C,EAF0C,EAG1C;AAAE1S,MAAAA,OAAO,EAAE,CAAC,UAAD,EAAa,2CAAb;AAAX,KAH0C,CAbxC;AAkBN2S,IAAAA,2DAA2D,EAAE,CACzD,2DADyD,EAEzD,EAFyD,EAGzD;AACI3S,MAAAA,OAAO,EAAE,CACL,UADK,EAEL,yDAFK;AADb,KAHyD,CAlBvD;AA4BN4S,IAAAA,uDAAuD,EAAE,CACrD,2DADqD,CA5BnD;AA+BNC,IAAAA,yCAAyC,EAAE,CACvC,iEADuC,CA/BrC;AAkCNC,IAAAA,0CAA0C,EAAE,CACxC,uEADwC,CAlCtC;AAqCNC,
IAAAA,8BAA8B,EAAE,CAC5B,kDAD4B,CArC1B;AAwCNC,IAAAA,yBAAyB,EAAE,CACvB,wDADuB,CAxCrB;AA2CNC,IAAAA,iBAAiB,EAAE,CACf,8DADe,CA3Cb;AA8CNC,IAAAA,qCAAqC,EAAE,CACnC,gFADmC,CA9CjC;AAiDNC,IAAAA,gCAAgC,EAAE,CAC9B,sFAD8B,CAjD5B;AAoDNC,IAAAA,wBAAwB,EAAE,CACtB,4FADsB,CApDpB;AAuDNC,IAAAA,kCAAkC,EAAE,CAChC,mEADgC,CAvD9B;AA0DNC,IAAAA,oBAAoB,EAAE,CAClB,yEADkB,CA1DhB;AA6DNC,IAAAA,yCAAyC,EAAE,CACvC,yFADuC,CA7DrC;AAgENC,IAAAA,2BAA2B,EAAE,CACzB,+FADyB;AAhEvB,GA5nBI;AAgsBdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,eAAe,EAAE,CACb,qDADa,EAEb;AAAEpP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFa,CADX;AAKNoP,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAErP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CALN;AASNqP,IAAAA,YAAY,EAAE,CACV,qCADU,EAEV;AAAEtP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CATR;AAaNsP,IAAAA,0BAA0B,EAAE,CACxB,qBADwB,EAExB;AAAEvP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFwB,CAbtB;AAiBNuP,IAAAA,YAAY,EAAE,CACV,2BADU,EAEV;AAAExP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CAjBR;AAqBNwP,IAAAA,aAAa,EAAE,CACX,qCADW,EAEX;AAAEzP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFW,CArBT;AAyBNgF,IAAAA,MAAM,EAAE,CACJ,+BADI,EAEJ;AAAEjF,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFI,CAzBF;AA6BNyP,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAE1P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CA7BN;AAiCN0P,IAAAA,YAAY,EAAE,CACV,sCADU,EAEV;AAAE3P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CAjCR;AAqCN0C,IAAAA,GAAG,EAAE,CACD,4BADC,EAED;AAAE3C,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFC,CArCC;AAyCN2P,IAAAA,OAAO,EAAE,CACL,uCADK,EAEL;AAAE5P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFK,CAzCH;AA6CN4P,IAAAA,SAAS,EAAE,CACP,mCADO,EAEP;AAAE7P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFO,CA7CL;AAiDN6P,IAAAA,oBAAoB,EAAE,CAClB,gEADkB,EAElB;AAAE9P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFkB,CAjDhB;AAqDN8P,IAAAA,SAAS,EAAE,CACP,yCADO,EAEP;AAAE/P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFO,CArDL;AAyDN+P,IAAAA,iBAAiB,EAAE,CACf,0CADe,EAEf;AAAEhQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFe,CAzDb;AA6DNgQ,IAAAA,WAAW,EAAE,CACT,oCADS,EAET;AAAEjQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CA7DP;AAiEN8I,IAAAA,UAAU,EAAE,CACR,0BADQ,EAER;AAAE/I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CAjEN;AAqEN+I,IAAAA,WAAW,EAAE,CACT,oCADS,EAET;AAAEhJ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CArEP;AAyENwF,IAAAA,WAAW,EAAE,CACT,gCADS,EAET;AAAEzF,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CAzEP;AA6ENiQ,IAAAA,QAAQ,EAAE,CACN,8CADM,EAEN;AAAElQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFM,CA7EJ;AAiFNkQ,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAEnQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CAjFN;AAqFNmQ,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,EAEhB;AAAEpQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFgB,CArFd;AAyFNkD,IAAAA,MAAM,EAAE,CACJ,8BADI,EAEJ;AAAEnD,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFI,CAzFF;AA6FNoQ,IAAAA,UAAU,EAAE,CACR,yCADQ,EAER;AAAErQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CA7FN;AAiGNqQ,IAAAA,YAAY,EAAE,CACV,qCADU,EAEV;AAAEtQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU;AAjGR,GAhsBI;AAsyBdsQ,EAAAA,KAAK,EAAE;AACHC,IAAAA,aAAa,EAAE,CAAC,qDAAD,CADZ;AAEH/N,IAAAA,MAAM,EAAE,CAAC,kCAAD,CAFL;AAGHgO,IAAAA,2BAA2B,EAAE,CACzB,8EADyB,CAH1B;AAMHC,IAAAA,YAAY,E
AAE,CAAC,wDAAD,CANX;AAOHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB,CAPlB;AAUHC,IAAAA,mBAAmB,EAAE,CACjB,sEADiB,CAVlB;AAaHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CAblB;AAgBHC,IAAAA,aAAa,EAAE,CACX,8EADW,CAhBZ;AAmBHnO,IAAAA,GAAG,EAAE,CAAC,+CAAD,CAnBF;AAoBHoO,IAAAA,SAAS,EAAE,CACP,mEADO,CApBR;AAuBHC,IAAAA,gBAAgB,EAAE,CAAC,uDAAD,CAvBf;AAwBH1L,IAAAA,IAAI,EAAE,CAAC,iCAAD,CAxBH;AAyBH2L,IAAAA,qBAAqB,EAAE,CACnB,4EADmB,CAzBpB;AA4BHzL,IAAAA,WAAW,EAAE,CAAC,uDAAD,CA5BV;AA6BH0L,IAAAA,SAAS,EAAE,CAAC,qDAAD,CA7BR;AA8BHC,IAAAA,sBAAsB,EAAE,CACpB,mEADoB,CA9BrB;AAiCHC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CAjCjB;AAoCHC,IAAAA,yBAAyB,EAAE,CAAC,0CAAD,CApCxB;AAqCHC,IAAAA,WAAW,EAAE,CAAC,uDAAD,CArCV;AAsCHC,IAAAA,KAAK,EAAE,CAAC,qDAAD,CAtCJ;AAuCHC,IAAAA,wBAAwB,EAAE,CACtB,sEADsB,CAvCvB;AA0CHC,IAAAA,gBAAgB,EAAE,CACd,oEADc,CA1Cf;AA6CHC,IAAAA,YAAY,EAAE,CACV,2EADU,CA7CX;AAgDHvO,IAAAA,MAAM,EAAE,CAAC,iDAAD,CAhDL;AAiDHwO,IAAAA,YAAY,EAAE,CACV,6DADU,EAEV;AAAE3R,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFU,CAjDX;AAqDH2R,IAAAA,YAAY,EAAE,CACV,mEADU,CArDX;AAwDHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB;AAxDlB,GAtyBO;AAk2BdC,EAAAA,SAAS,EAAE;AAAEnP,IAAAA,GAAG,EAAE,CAAC,iBAAD;AAAP,GAl2BG;AAm2BdoP,EAAAA,SAAS,EAAE;AACPC,IAAAA,sBAAsB,EAAE,CACpB,4DADoB,EAEpB;AAAEhS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFoB,CADjB;AAKPgS,IAAAA,cAAc,EAAE,CACZ,4DADY,EAEZ;AAAEjS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CALT;AASPiS,IAAAA,qBAAqB,EAAE,CACnB,mEADmB,EAEnB;AAAElS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFmB,CAThB;AAaPkS,IAAAA,iCAAiC,EAAE,CAC/B,kEAD+B,EAE/B;AAAEnS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF+B,CAb5B;AAiBPmS,IAAAA,mCAAmC,EAAE,CACjC,wGADiC,EAEjC;AAAEpS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFiC,CAjB9B;AAqBPoS,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B;AAAErS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF0B,CArBvB;AAyBPqS,IAAAA,sBAAsB,EAAE,CACpB,4EADoB,EAEpB;AAAEtS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFoB,CAzBjB;AA6BPsS,IAAAA,cAAc,EAAE,CACZ,4EADY,EAEZ;AAAEvS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CA7BT;AAiCPuS,IAAAA,qBAAqB,EAAE,CACnB,mFADmB,EAEnB;AAAExS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFmB,CAjChB;AAqCPwS,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,EAEzB;AAAEzS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFyB,CArCtB;AAyCPyS,IAAAA,uBAAuB,EAAE,CACrB,8FADqB,EAErB;AAAE1S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFqB,CAzClB;AA6CP0S,IAAAA,8BAA8B,EAAE,CAC5B,wHAD4B,EAE5B;AAAE3S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF4B,CA7CzB;AAiDP2S,IAAAA,YAAY,EAAE,CACV,iCADU,EAEV;AAAE5S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFU,EAGV;AACI4S,MAAAA,UAAU,EAAE;AADhB,KAHU,CAjDP;AAwDPC,IAAAA,oBAAoB,EAAE,CAClB,2DADkB,EAElB;AAAE9S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFkB,CAxDf;AA4DP8S,IAAAA,YAAY,EAAE,CACV,2DADU,EAEV;AAAE/S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFU,CA5DP;AAgEP+S,IAAAA,mBAAmB,EAAE,CACjB,kEADiB,EAEjB;AAAEhT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFiB,CAhEd;AAoEPgT,IAAAA,+BAA+B,EAAE,CAC7B,iEAD6B,EAE7B;AAAEjT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF6B,CApE1B;AAwEPiT,IAAAA,iCAAiC,EAAE,CAC/B,uGAD+B,EAE/B;AAAElT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF+B,CAxE5B;AA4EPkT,IAAAA,0BAA0B,EAAE,CACxB,6EADwB,EAExB;AAAEnT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFwB;AA5ErB,GAn2BG;AAo7BdmT,EAAAA,KAAK,EAAE;AACHC,IAAAA,gBAAgB,E
AAE,CAAC,oDAAD,CADf;AAEHC,IAAAA,wBAAwB,EAAE,CACtB,2EADsB,EAEtB,EAFsB,EAGtB;AAAEC,MAAAA,SAAS,EAAE;AAAb,KAHsB,CAFvB;AAOHnE,IAAAA,eAAe,EAAE,CAAC,oDAAD,CAPd;AAQHoE,IAAAA,sBAAsB,EAAE,CACpB,yFADoB,EAEpB,EAFoB,EAGpB;AAAED,MAAAA,SAAS,EAAE;AAAb,KAHoB,CARrB;AAaHE,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEF,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAbxB;AAkBHG,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAlBxB;AAuBHI,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CAvBhB;AAwBHC,IAAAA,wBAAwB,EAAE,CACtB,gDADsB,EAEtB;AAAE5T,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFsB,CAxBvB;AA4BH4T,IAAAA,cAAc,EAAE,CAAC,mDAAD,CA5Bb;AA6BHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CA7BlB;AAgCHC,IAAAA,+BAA+B,EAAE,CAC7B,6EAD6B,EAE7B;AAAE/T,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF6B,CAhC9B;AAoCH+T,IAAAA,kBAAkB,EAAE,CAAC,2CAAD,CApCjB;AAqCHC,IAAAA,eAAe,EAAE,CAAC,iCAAD,CArCd;AAsCHC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAtCf;AAuCHC,IAAAA,sBAAsB,EAAE,CACpB,iEADoB,CAvCrB;AA0CHC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CA1ClB;AA2CH7E,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CA3CzB;AA4CH8E,IAAAA,UAAU,EAAE,CAAC,kCAAD,CA5CT;AA6CHC,IAAAA,WAAW,EAAE,CAAC,wBAAD,CA7CV;AA8CHC,IAAAA,yBAAyB,EAAE,CACvB,2DADuB,CA9CxB;AAiDHC,IAAAA,0BAA0B,EAAE,CAAC,2CAAD,CAjDzB;AAkDHC,IAAAA,eAAe,EAAE,CACb,kCADa,EAEb;AAAEzU,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,YAAD;AAAZ;AAAb,KAFa,CAlDd;AAsDHyU,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAtDZ;AAuDHC,IAAAA,mBAAmB,EAAE,CACjB,uDADiB,EAEjB;AAAE3U,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,UAAD;AAAZ;AAAb,KAFiB,CAvDlB;AA2DHmM,IAAAA,aAAa,EAAE,CAAC,kCAAD,CA3DZ;AA4DHwI,IAAAA,iBAAiB,EAAE,CAAC,qDAAD,CA5DhB;AA6DH3P,IAAAA,MAAM,EAAE,CAAC,8BAAD,CA7DL;AA8DH4P,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CA9DvB;AAiEHC,IAAAA,2BAA2B,EAAE,CACzB,0EADyB,CAjE1B;AAoEHC,IAAAA,mBAAmB,EAAE,CACjB,8DADiB,CApElB;AAuEHC,IAAAA,sBAAsB,EAAE,CACpB,2DADoB,CAvErB;AA0EHC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CA1ElB;AA2EHC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,EAE7B;AAAElV,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF6B,CA3E9B;AA+EHkV,IAAAA,eAAe,EAAE,CAAC,4CAAD,CA/Ed;AAgFHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CAhFf;AAmFHC,IAAAA,UAAU,EAAE,CAAC,8CAAD,CAnFT;AAoFHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CApFf;AAuFHC,IAAAA,eAAe,EAAE,CACb,oCADa,EAEb;AAAEvV,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,YAAD;AAAZ;AAAb,KAFa,CAvFd;AA2FHuV,IAAAA,iCAAiC,EAAE,CAC/B,yFAD+B,CA3FhC;AA8FHC,IAAAA,aAAa,EAAE,CAAC,oDAAD,CA9FZ;AA+FHC,IAAAA,kBAAkB,EAAE,CAChB,yDADgB,CA/FjB;AAkGHrJ,IAAAA,aAAa,EAAE,CAAC,8CAAD,CAlGZ;AAmGHsJ,IAAAA,6BAA6B,EAAE,CAC3B,uDAD2B,EAE3B;AAAE3V,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAF2B,CAnG5B;AAuGH2V,IAAAA,0BAA0B,EAAE,CACxB,mDADwB,EAExB;AAAE5V,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFwB,CAvGzB;AA2GH4V,IAAAA,eAAe,EAAE,CACb,yCADa,EAEb,EAFa,EAGb;AAAEna,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,wBAAV;AAAX,KAHa,CA3Gd;AAgHHoa,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CAhHrB;AAiHHC,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CAjHrB;AAkHHC,IAAAA,4BAA4B,EAAE,CAC1B,oDAD0B,EAE1B;AAAEhW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAF0B,CAlH3B;AAsHHgW,IAAAA,yBAAyB,EAAE,CACvB,gDADuB,EAEvB;AAAEjW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFuB,CAtHxB;AA0HH0C,IAAAA,GAAG,EAAE,CAAC,2BAAD,CA1HF;AA2HHuT,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CA3HpB;AA8HHC,IAAAA,wBAAwB,EAAE,CACtB,uEADsB,CA9HvB;AAiIHC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CAjIjB;AAkIHC,IAAAA,yBAAyB,EAAE,CACvB,wFADuB,CAlIxB;AAqIHC,IAAAA,YAAY,EAAE,CACV,kCADU,EAEV;AAAEtW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFU,CArIX;AAyIHsW,IAAAA,kCAAkC,EAAE,CAChC,0EADgC,CAzIjC;AA4IHC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CA5IR;AA6IHC
,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CA7IlB;AAgJHC,IAAAA,SAAS,EAAE,CAAC,0CAAD,CAhJR;AAiJHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CAjJpB;AAkJHC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CAlJ7B;AAqJHC,IAAAA,uBAAuB,EAAE,CAAC,gDAAD,CArJtB;AAsJHrQ,IAAAA,SAAS,EAAE,CAAC,yCAAD,CAtJR;AAuJHsQ,IAAAA,sBAAsB,EAAE,CAAC,iDAAD,CAvJrB;AAwJHC,IAAAA,gBAAgB,EAAE,CAAC,iDAAD,CAxJf;AAyJHC,IAAAA,4BAA4B,EAAE,CAC1B,4EAD0B,EAE1B;AAAEhX,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF0B,CAzJ3B;AA6JHgX,IAAAA,0BAA0B,EAAE,CAAC,6CAAD,CA7JzB;AA8JHC,IAAAA,UAAU,EAAE,CAAC,2CAAD,CA9JT;AA+JHC,IAAAA,oBAAoB,EAAE,CAAC,8CAAD,CA/JnB;AAgKHC,IAAAA,YAAY,EAAE,CAAC,yCAAD,CAhKX;AAiKHC,IAAAA,aAAa,EAAE,CAAC,uDAAD,CAjKZ;AAkKHC,IAAAA,mBAAmB,EAAE,CACjB,4EADiB,CAlKlB;AAqKHC,IAAAA,cAAc,EAAE,CACZ,2DADY,CArKb;AAwKHC,IAAAA,mBAAmB,EAAE,CAAC,+CAAD,CAxKlB;AAyKHC,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CAzKf;AA0KHC,IAAAA,QAAQ,EAAE,CAAC,iCAAD,CA1KP;AA2KHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CA3KZ;AA4KHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CA5KpB;AA6KHC,IAAAA,8BAA8B,EAAE,CAC5B,sFAD4B,CA7K7B;AAgLHC,IAAAA,iBAAiB,EAAE,CAAC,4CAAD,CAhLhB;AAiLHC,IAAAA,SAAS,EAAE,CAAC,kCAAD,CAjLR;AAkLHC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAlLnB;AAmLHC,IAAAA,UAAU,EAAE,CAAC,iDAAD,CAnLT;AAoLHC,IAAAA,eAAe,EAAE,CAAC,sDAAD,CApLd;AAqLHC,IAAAA,eAAe,EAAE,CAAC,+CAAD,CArLd;AAsLHC,IAAAA,yBAAyB,EAAE,CACvB,+EADuB,CAtLxB;AAyLHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CAzLlC;AA4LHC,IAAAA,WAAW,EAAE,CAAC,iDAAD,CA5LV;AA6LHC,IAAAA,eAAe,EAAE,CAAC,qDAAD,CA7Ld;AA8LHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CA9LlC;AAiMHC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CAjMP;AAkMHjM,IAAAA,UAAU,EAAE,CAAC,2CAAD,CAlMT;AAmMHkM,IAAAA,uBAAuB,EAAE,CACrB,kDADqB,CAnMtB;AAsMHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CAtMX;AAuMHC,IAAAA,yBAAyB,EAAE,CACvB,oEADuB,EAEvB;AAAE5Y,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFuB,CAvMxB;AA2MH+P,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA3MhB;AA4MH6I,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CA5MpB;AA+MHC,IAAAA,yBAAyB,EAAE,CAAC,oCAAD,CA/MxB;AAgNHC,IAAAA,wBAAwB,EAAE,CACtB,kDADsB,CAhNvB;AAmNHvT,IAAAA,WAAW,EAAE,CAAC,mCAAD,CAnNV;AAoNHwT,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CApNf;AAqNHC,IAAAA,cAAc,EAAE,CAAC,gCAAD,CArNb;AAsNHC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CAtNrB;AAyNHC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CAzNd;AA0NHrQ,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA1NvB;AA2NHC,IAAAA,UAAU,EAAE,CAAC,uBAAD,CA3NT;AA4NHtD,IAAAA,WAAW,EAAE,CAAC,6BAAD,CA5NV;AA6NHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CA7NR;AA8NH0T,IAAAA,eAAe,EAAE,CAAC,uCAAD,CA9Nd;AA+NHC,IAAAA,mCAAmC,EAAE,CAAC,kCAAD,CA/NlC;AAgOHC,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAhOZ;AAiOHC,IAAAA,eAAe,EAAE,CAAC,wCAAD,CAjOd;AAkOH5T,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAlOT;AAmOH6T,IAAAA,oCAAoC,EAAE,CAClC,sDADkC,EAElC;AAAExZ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFkC,CAnOnC;AAuOHwZ,IAAAA,iBAAiB,EAAE,CACf,wDADe,CAvOhB;AA0OHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CA1OX;AA2OHC,IAAAA,QAAQ,EAAE,CAAC,gCAAD,CA3OP;AA4OHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CA5OR;AA6OHzM,IAAAA,YAAY,EAAE,CAAC,iCAAD,CA7OX;AA8OHoE,IAAAA,KAAK,EAAE,CAAC,mCAAD,CA9OJ;AA+OHnE,IAAAA,WAAW,EAAE,CAAC,kDAAD,CA/OV;AAgPHyM,IAAAA,2BAA2B,EAAE,CACzB,6EADyB,EAEzB,EAFyB,EAGzB;AAAEtG,MAAAA,SAAS,EAAE;AAAb,KAHyB,CAhP1B;AAqPHnD,IAAAA,kBAAkB,EAAE,CAChB,uDADgB,CArPjB;AAwPH0J,IAAAA,yBAAyB,EAAE,CACvB,2FADuB,EAEvB,EAFuB,EAGvB;AAAEvG,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAxPxB;AA6PHwG,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,CA7P1B;AAgQHC,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAEzG,MAAAA,SAAS,EAAE;AAAb,KAH0B,CAhQ3B;AAqQH0G,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAE1G,MAAAA,SAAS,EAAE;AAAb,KAH0B,CArQ3B;AA0QH2G,IAAAA,YAAY,EAAE,CAAC,qDAAD,CA1QX;AA2QHC,IAAAA,gBAAgB,EAAE,CACd,kCADc,EAEd;AAAEna,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFc
,CA3Qf;AA+QHma,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA/QhB;AAgRHC,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CAhRvB;AAmRHC,IAAAA,wBAAwB,EAAE,CACtB,0EADsB,EAEtB,EAFsB,EAGtB;AAAE/G,MAAAA,SAAS,EAAE;AAAb,KAHsB,CAnRvB;AAwRHgH,IAAAA,sBAAsB,EAAE,CACpB,wFADoB,EAEpB,EAFoB,EAGpB;AAAEhH,MAAAA,SAAS,EAAE;AAAb,KAHoB,CAxRrB;AA6RHiH,IAAAA,yBAAyB,EAAE,CACvB,2EADuB,EAEvB,EAFuB,EAGvB;AAAEjH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CA7RxB;AAkSHkH,IAAAA,yBAAyB,EAAE,CACvB,2EADuB,EAEvB,EAFuB,EAGvB;AAAElH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAlSxB;AAuSHmH,IAAAA,eAAe,EAAE,CAAC,kDAAD,CAvSd;AAwSHC,IAAAA,QAAQ,EAAE,CAAC,qCAAD,CAxSP;AAySHxX,IAAAA,MAAM,EAAE,CAAC,6BAAD,CAzSL;AA0SHyX,IAAAA,sBAAsB,EAAE,CACpB,wDADoB,CA1SrB;AA6SHC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CA7SlB;AA8SHC,IAAAA,+BAA+B,EAAE,CAAC,iCAAD,CA9S9B;AA+SHC,IAAAA,gBAAgB,EAAE,CACd,yDADc,CA/Sf;AAkTHC,IAAAA,iCAAiC,EAAE,CAC/B,wFAD+B,CAlThC;AAqTHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CArTZ;AAsTHC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CAtTjB;AAyTHC,IAAAA,0BAA0B,EAAE,CACxB,iFADwB,EAExB,EAFwB,EAGxB;AAAEzf,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,6BAAV;AAAX,KAHwB,CAzTzB;AA8TH0f,IAAAA,2BAA2B,EAAE,CACzB,iFADyB,CA9T1B;AAiUHvN,IAAAA,aAAa,EAAE,CAAC,6CAAD,CAjUZ;AAkUHwN,IAAAA,0BAA0B,EAAE,CACxB,oDADwB,CAlUzB;AAqUHC,IAAAA,kBAAkB,EAAE,CAChB,sEADgB,EAEhB;AAAEC,MAAAA,OAAO,EAAE;AAAX,KAFgB;AArUjB,GAp7BO;AA8vCdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,IAAI,EAAE,CAAC,kBAAD,CADF;AAEJC,IAAAA,OAAO,EAAE,CAAC,qBAAD,EAAwB;AAAE1b,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAAxB,CAFL;AAGJ0b,IAAAA,qBAAqB,EAAE,CAAC,oBAAD,CAHnB;AAIJC,IAAAA,MAAM,EAAE,CAAC,oBAAD,CAJJ;AAKJxI,IAAAA,KAAK,EAAE,CAAC,0BAAD,CALH;AAMJyI,IAAAA,MAAM,EAAE,CAAC,oBAAD,EAAuB;AAAE7b,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAAvB,CANJ;AAOJ6b,IAAAA,KAAK,EAAE,CAAC,mBAAD;AAPH,GA9vCM;AAuwCdC,EAAAA,cAAc,EAAE;AACZzY,IAAAA,QAAQ,EAAE,CACN,iEADM,CADE;AAIZK,IAAAA,iBAAiB,EAAE,CAAC,kDAAD,CAJP;AAKZG,IAAAA,WAAW,EAAE,CACT,mEADS;AALD,GAvwCF;AAgxCdkY,EAAAA,KAAK,EAAE;AACHC,IAAAA,iCAAiC,EAAE,CAC/B,0DAD+B,CADhC;AAIHC,IAAAA,kCAAkC,EAAE,CAChC,yDADgC,EAEhC;AAAElc,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFgC,CAJjC;AAQHkc,IAAAA,+BAA+B,EAAE,CAC7B,wDAD6B,CAR9B;AAWHC,IAAAA,+BAA+B,EAAE,CAC7B,yDAD6B,EAE7B;AAAEpc,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAF6B,CAX9B;AAeHoc,IAAAA,4BAA4B,EAAE,CAC1B,wDAD0B,CAf3B;AAkBH5Z,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAlBL;AAmBH6Z,IAAAA,4BAA4B,EAAE,CAC1B,6EAD0B,CAnB3B;AAsBHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CAtBpB;AAuBHC,IAAAA,4BAA4B,EAAE,CAC1B,gGAD0B,CAvB3B;AA0BHC,IAAAA,qBAAqB,EAAE,CACnB,sEADmB,CA1BpB;AA6BHC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CA7BV;AA8BHC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CA9BR;AA+BHC,IAAAA,yBAAyB,EAAE,CACvB,6FADuB,CA/BxB;AAkCHC,IAAAA,kBAAkB,EAAE,CAChB,mEADgB,CAlCjB;AAqCHC,IAAAA,yBAAyB,EAAE,CACvB,0DADuB,CArCxB;AAwCHxX,IAAAA,IAAI,EAAE,CAAC,uBAAD,CAxCH;AAyCHyX,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAzCb;AA0CHC,IAAAA,2BAA2B,EAAE,CACzB,4EADyB,CA1C1B;AA6CHC,IAAAA,oBAAoB,EAAE,CAAC,+CAAD,CA7CnB;AA8CHnU,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA9CvB;AA+CHoU,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CA/Cf;AAgDHC,IAAAA,2BAA2B,EAAE,CACzB,+CADyB,CAhD1B;AAmDHC,IAAAA,iBAAiB,EAAE,CACf,4CADe,EAEf;AAAEpd,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFe,CAnDhB;AAuDHod,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAvDb;AAwDHC,IAAAA,4BAA4B,EAAE,CAC1B,6DAD0B,CAxD3B;AA2DHC,IAAAA,kBAAkB,EAAE,CAChB,4DADgB,CA3DjB;AA8DHC,IAAAA,eAAe,EAAE,CACb,2DADa,CA9Dd;AAiEHC,IAAAA,4BAA4B,EAAE,CAC1B,+FAD0B,CAjE3B;AAoEHC,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CApEpB;AAuEHC,IAAAA,WAAW,EAAE,CAAC,qCAAD;AAvEV,GAhxCO;AAy1Cd7B,EAAAA,KAAK,EAAE;AACH8B,IAAAA,wBAAwB,EAAE,CAAC,mBAAD,CADvB;AAEHC,IAAAA,KAAK,EAAE,CAAC,6BAAD,CAFJ;AAGHC,IAAAA,YAAY,EAAE,CAAC,6BA
AD,CAHX;AAIHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CAJpB;AAKHC,IAAAA,oCAAoC,EAAE,CAAC,gCAAD,CALnC;AAMHC,IAAAA,4BAA4B,EAAE,CAAC,qBAAD,CAN3B;AAOHC,IAAAA,kCAAkC,EAAE,CAAC,iBAAD,CAPjC;AAQHC,IAAAA,2BAA2B,EAAE,CAAC,qBAAD,CAR1B;AASHC,IAAAA,4BAA4B,EAAE,CAAC,oCAAD,CAT3B;AAUHC,IAAAA,kCAAkC,EAAE,CAAC,4BAAD,CAVjC;AAWHC,IAAAA,MAAM,EAAE,CAAC,gCAAD,CAXL;AAYH/d,IAAAA,gBAAgB,EAAE,CAAC,WAAD,CAZf;AAaHge,IAAAA,aAAa,EAAE,CAAC,uBAAD,CAbZ;AAcHC,IAAAA,iBAAiB,EAAE,CAAC,iCAAD,CAdhB;AAeHC,IAAAA,yBAAyB,EAAE,CAAC,iCAAD,CAfxB;AAgBHC,IAAAA,+BAA+B,EAAE,CAAC,yBAAD,CAhB9B;AAiBHpZ,IAAAA,IAAI,EAAE,CAAC,YAAD,CAjBH;AAkBHqZ,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CAlBzB;AAmBHC,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CAnBzB;AAoBHC,IAAAA,2BAA2B,EAAE,CAAC,qBAAD,CApB1B;AAqBHC,IAAAA,iCAAiC,EAAE,CAAC,qBAAD,CArBhC;AAsBHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CAtBnB;AAuBHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CAvBnB;AAwBHC,IAAAA,2BAA2B,EAAE,CAAC,oBAAD,CAxB1B;AAyBHC,IAAAA,kBAAkB,EAAE,CAAC,gCAAD,CAzBjB;AA0BHC,IAAAA,gCAAgC,EAAE,CAAC,yBAAD,CA1B/B;AA2BHC,IAAAA,qBAAqB,EAAE,CAAC,4BAAD,CA3BpB;AA4BHC,IAAAA,iCAAiC,EAAE,CAAC,gBAAD,CA5BhC;AA6BHC,IAAAA,yCAAyC,EAAE,CAAC,8BAAD,CA7BxC;AA8BHC,IAAAA,OAAO,EAAE,CAAC,gCAAD,CA9BN;AA+BHC,IAAAA,QAAQ,EAAE,CAAC,mCAAD,CA/BP;AAgCHC,IAAAA,mBAAmB,EAAE,CAAC,aAAD;AAhClB;AAz1CO,CAAlB;;ACAO,MAAMC,OAAO,GAAG,mBAAhB;;ACAA,SAASC,kBAAT,CAA4BC,OAA5B,EAAqCC,YAArC,EAAmD;AACtD,QAAMC,UAAU,GAAG,EAAnB;;AACA,OAAK,MAAM,CAACC,KAAD,EAAQC,SAAR,CAAX,IAAiCC,MAAM,CAACC,OAAP,CAAeL,YAAf,CAAjC,EAA+D;AAC3D,SAAK,MAAM,CAACM,UAAD,EAAaC,QAAb,CAAX,IAAqCH,MAAM,CAACC,OAAP,CAAeF,SAAf,CAArC,EAAgE;AAC5D,YAAM,CAACK,KAAD,EAAQC,QAAR,EAAkBC,WAAlB,IAAiCH,QAAvC;AACA,YAAM,CAACI,MAAD,EAASC,GAAT,IAAgBJ,KAAK,CAACK,KAAN,CAAY,GAAZ,CAAtB;AACA,YAAMC,gBAAgB,GAAGV,MAAM,CAACW,MAAP,CAAc;AAAEJ,QAAAA,MAAF;AAAUC,QAAAA;AAAV,OAAd,EAA+BH,QAA/B,CAAzB;;AACA,UAAI,CAACR,UAAU,CAACC,KAAD,CAAf,EAAwB;AACpBD,QAAAA,UAAU,CAACC,KAAD,CAAV,GAAoB,EAApB;AACH;;AACD,YAAMc,YAAY,GAAGf,UAAU,CAACC,KAAD,CAA/B;;AACA,UAAIQ,WAAJ,EAAiB;AACbM,QAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BW,QAAQ,CAAClB,OAAD,EAAUG,KAAV,EAAiBI,UAAjB,EAA6BQ,gBAA7B,EAA+CJ,WAA/C,CAAnC;AACA;AACH;;AACDM,MAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BP,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBK,gBAAzB,CAA3B;AACH;AACJ;;AACD,SAAOb,UAAP;AACH;;AACD,SAASgB,QAAT,CAAkBlB,OAAlB,EAA2BG,KAA3B,EAAkCI,UAAlC,EAA8CG,QAA9C,EAAwDC,WAAxD,EAAqE;AACjE,QAAMS,mBAAmB,GAAGpB,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBA,QAAzB,CAA5B;AACA;;AACA,WAASW,eAAT,CAAyB,GAAGC,IAA5B,EAAkC;AAC9B;AACA,QAAIC,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6B7O,KAA7B,CAAmC,GAAG2P,IAAtC,CAAd,CAF8B;;AAI9B,QAAIX,WAAW,CAAChN,SAAhB,EAA2B;AACvB4N,MAAAA,OAAO,GAAGlB,MAAM,CAACW,MAAP,CAAc,EAAd,EAAkBO,OAAlB,EAA2B;AACjCC,QAAAA,IAAI,EAAED,OAAO,CAACZ,WAAW,CAAChN,SAAb,CADoB;AAEjC,SAACgN,WAAW,CAAChN,SAAb,GAAyB8N;AAFQ,OAA3B,CAAV;AAIA,aAAOL,mBAAmB,CAACG,OAAD,CAA1B;AACH;;AACD,QAAIZ,WAAW,CAAC7kB,OAAhB,EAAyB;AACrB,YAAM,CAAC4lB,QAAD,EAAWC,aAAX,IAA4BhB,WAAW,CAAC7kB,OAA9C;AACAkkB,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,WAAU1B,KAAM,IAAGI,UAAW,kCAAiCmB,QAAS,IAAGC,aAAc,IAA3G;AACH;;AACD,QAAIhB,WAAW,CAAC1N,UAAhB,EAA4B;AACxB+M,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAiBlB,WAAW,CAAC1N,UAA7B;AACH;;AACD,QAAI0N,WAAW,CAAChd,iBAAhB,EAAmC;AAC/B;AACA,YAAM4d,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6B7O,KAA7B,CAAmC,GAAG2P,IAAtC,CAAhB;;AACA,WAAK,MAAM,CAACQ,IAAD,EAAOC,KAAP,CAAX,IAA4B1B,MAAM,CAACC,OAAP,CAAeK,WAAW,CAAChd,iBAA3B,CAA5B,EAA2E;AACvE,YAAIme,IAAI,IAAIP,OAAZ,EAAqB;AACjBvB,UAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,IAAGC,IAAK,0CAAyC3B,KAAM,IAAGI,UAAW,aAAYwB,KAAM,WAAzG;;AACA,cAAI,EAAEA,KAAK,IAAIR,OAAX,CAAJ,EAAyB;AACrBA,YAAAA,OAAO,CAACQ,KAAD,CAAP,GAAiBR,OAAO,CAACO,IAAD,CAAxB;AACH;;AACD,iBAAOP,OAAO,CAACO,IAAD,CAAd;AACH;AAC
J;;AACD,aAAOV,mBAAmB,CAACG,OAAD,CAA1B;AACH,KA/B6B;;;AAiC9B,WAAOH,mBAAmB,CAAC,GAAGE,IAAJ,CAA1B;AACH;;AACD,SAAOjB,MAAM,CAACW,MAAP,CAAcK,eAAd,EAA+BD,mBAA/B,CAAP;AACH;;ACxDM,SAASY,mBAAT,CAA6BhC,OAA7B,EAAsC;AACzC,QAAMiC,GAAG,GAAGlC,kBAAkB,CAACC,OAAD,EAAUkC,SAAV,CAA9B;AACA,2CACOD,GADP;AAEIE,IAAAA,IAAI,EAAEF;AAFV;AAIH;AACDD,mBAAmB,CAAClC,OAApB,GAA8BA,OAA9B;;;;"} \ No newline at end of file diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js deleted file mode 100644 index 32c2f39a..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js +++ /dev/null @@ -1,60 +0,0 @@ -export function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ method, url }, defaults); - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - const scopeMethods = newMethods[scope]; - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - return newMethods; -} -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); - // There are currently no other decorations than `.mapToData` - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined, - }); - return requestWithDefaults(options); - } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); - if (!(alias in options)) { - options[alias] = options[name]; - } - delete options[name]; - } - } - return requestWithDefaults(options); - } - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - return requestWithDefaults(...args); - } - return Object.assign(withDecorations, requestWithDefaults); -} diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js deleted file mode 100644 index 2b65b015..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js +++ /dev/null @@ -1,1405 +0,0 @@ -const Endpoints = { - actions: { - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", - ], - cancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel", - ], - createOrUpdateEnvironmentSecret: [ - "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", - ], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", - ], - createRegistrationTokenForOrg: [ - "POST /orgs/{org}/actions/runners/registration-token", - ], - createRegistrationTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/registration-token", - ], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/remove-token", - ], - createWorkflowDispatch: [ - "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", - ], - deleteArtifact: [ - "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}", - ], - deleteEnvironmentSecret: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", - ], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", - ], - deleteSelfHostedRunnerFromOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}", - ], - deleteSelfHostedRunnerFromRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", - ], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: [ - "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs", - ], - disableSelectedRepositoryGithubActionsOrganization: [ - "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}", - ], - disableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", - ], - downloadArtifact: [ - "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", - ], - downloadJobLogsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", - ], - downloadWorkflowRunLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", - ], - enableSelectedRepositoryGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}", - ], - enableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", - ], - getAllowedActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/selected-actions", - ], - getAllowedActionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/selected-actions", - ], - getArtifact: ["GET 
/repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key", - ], - getEnvironmentSecret: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", - ], - getGithubActionsPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions", - ], - getGithubActionsPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions", - ], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", - ], - getRepoPermissions: [ - "GET /repos/{owner}/{repo}/actions/permissions", - {}, - { renamed: ["actions", "getGithubActionsPermissionsRepository"] }, - ], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals", - ], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}", - ], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing", - ], - getWorkflowUsage: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", - ], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets", - ], - listJobsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", - ], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/downloads", - ], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", - ], - listSelectedRepositoriesEnabledGithubActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/repositories", - ], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", - ], - listWorkflowRuns: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", - ], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", - ], - reviewPendingDeploymentsForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", - ], - setAllowedActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/selected-actions", - ], - setAllowedActionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions", - ], - 
setGithubActionsPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions", - ], - setGithubActionsPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions", - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories", - ], - setSelectedRepositoriesEnabledGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories", - ], - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: [ - "DELETE /notifications/threads/{thread_id}/subscription", - ], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: [ - "GET /notifications/threads/{thread_id}/subscription", - ], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: [ - "GET /users/{username}/events/orgs/{org}", - ], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: [ - "GET /users/{username}/received_events/public", - ], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/notifications", - ], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: [ - "PUT /notifications/threads/{thread_id}/subscription", - ], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"], - }, - apps: { - addRepoToInstallation: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}", - ], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: [ - "POST /content_references/{content_reference_id}/attachments", - { mediaType: { previews: ["corsair"] } }, - ], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: [ - "POST /app/installations/{installation_id}/access_tokens", - ], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - 
getSubscriptionPlanForAccount: [ - "GET /marketplace_listing/accounts/{account_id}", - ], - getSubscriptionPlanForAccountStubbed: [ - "GET /marketplace_listing/stubbed/accounts/{account_id}", - ], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: [ - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", - ], - listInstallationReposForAuthenticatedUser: [ - "GET /user/installations/{installation_id}/repositories", - ], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: [ - "GET /user/marketplace_purchases/stubbed", - ], - removeRepoFromInstallation: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}", - ], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: [ - "DELETE /app/installations/{installation_id}/suspended", - ], - updateWebhookConfigForApp: ["PATCH /app/hook/config"], - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: [ - "GET /users/{username}/settings/billing/actions", - ], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: [ - "GET /users/{username}/settings/billing/packages", - ], - getSharedStorageBillingOrg: [ - "GET /orgs/{org}/settings/billing/shared-storage", - ], - getSharedStorageBillingUser: [ - "GET /users/{username}/settings/billing/shared-storage", - ], - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: [ - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", - ], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: [ - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", - ], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestSuite: [ - "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", - ], - setSuitesPreferences: [ - "PATCH /repos/{owner}/{repo}/check-suites/preferences", - ], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"], - }, - codeScanning: { - deleteAnalysis: [ - "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}", - ], - getAlert: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - {}, - { renamedParameters: { alert_id: "alert_number" } }, - ], - getAnalysis: [ - "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", - ], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - 
listAlertsInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - ], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - ], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"], - }, - codesOfConduct: { - getAllCodesOfConduct: [ - "GET /codes_of_conduct", - { mediaType: { previews: ["scarlet-witch"] } }, - ], - getConductCode: [ - "GET /codes_of_conduct/{key}", - { mediaType: { previews: ["scarlet-witch"] } }, - ], - getForRepo: [ - "GET /repos/{owner}/{repo}/community/code_of_conduct", - { mediaType: { previews: ["scarlet-witch"] } }, - ], - }, - emojis: { get: ["GET /emojis"] }, - enterpriseAdmin: { - disableSelectedOrganizationGithubActionsEnterprise: [ - "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", - ], - enableSelectedOrganizationGithubActionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", - ], - getAllowedActionsEnterprise: [ - "GET /enterprises/{enterprise}/actions/permissions/selected-actions", - ], - getGithubActionsPermissionsEnterprise: [ - "GET /enterprises/{enterprise}/actions/permissions", - ], - listSelectedOrganizationsEnabledGithubActionsEnterprise: [ - "GET /enterprises/{enterprise}/actions/permissions/organizations", - ], - setAllowedActionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions/selected-actions", - ], - setGithubActionsPermissionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions", - ], - setSelectedOrganizationsEnabledGithubActionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions/organizations", - ], - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"], - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"], - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET 
/gitignore/templates/{name}"], - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: [ - "GET /user/interaction-limits", - {}, - { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }, - ], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: [ - "DELETE /repos/{owner}/{repo}/interaction-limits", - ], - removeRestrictionsForYourPublicRepos: [ - "DELETE /user/interaction-limits", - {}, - { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }, - ], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: [ - "PUT /user/interaction-limits", - {}, - { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }, - ], - }, - issues: { - addAssignees: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees", - ], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/comments", - ], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}", - ], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: [ - "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}", - ], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", - { mediaType: { previews: ["mockingbird"] } }, - ], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: [ - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", - ], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", - ], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", - ], - removeAssignees: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees", - ], - removeLabel: [ - "DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", - ], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: [ - "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}", - ], - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"], - }, - markdown: { - render: ["POST /markdown"], - renderRaw: [ - "POST /markdown/raw", - { headers: { "content-type": "text/plain; charset=utf-8" } }, - ], - }, - meta: { - get: ["GET /meta"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"], - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - deleteArchiveForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - downloadArchiveForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - getArchiveForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}", - { mediaType: { previews: ["wyandotte"] } }, - ], - getStatusForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}", - { mediaType: { previews: ["wyandotte"] } }, - ], - listForAuthenticatedUser: [ - "GET /user/migrations", - { mediaType: { previews: ["wyandotte"] } }, - ], - listForOrg: [ - "GET /orgs/{org}/migrations", - { mediaType: { previews: ["wyandotte"] } }, - ], - listReposForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/repositories", - { mediaType: { previews: ["wyandotte"] } }, - ], - listReposForUser: [ - "GET /user/migrations/{migration_id}/repositories", - { mediaType: { previews: ["wyandotte"] } }, - ], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", - { mediaType: { previews: ["wyandotte"] } }, - ], - unlockRepoForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", - { mediaType: { previews: ["wyandotte"] } }, - ], - updateImport: ["PATCH /repos/{owner}/{repo}/import"], - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: [ - "PUT /orgs/{org}/outside_collaborators/{username}", - ], - 
createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: [ - "DELETE /orgs/{org}/outside_collaborators/{username}", - ], - removePublicMembershipForAuthenticatedUser: [ - "DELETE /orgs/{org}/public_members/{username}", - ], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: [ - "PUT /orgs/{org}/public_members/{username}", - ], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: [ - "PATCH /user/memberships/orgs/{org}", - ], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"], - }, - packages: { - deletePackageForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}", - ], - deletePackageForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}", - ], - deletePackageVersionForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - deletePackageVersionForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - getAllPackageVersionsForAPackageOwnedByAnOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - {}, - { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }, - ], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - {}, - { - renamed: [ - "packages", - "getAllPackageVersionsForPackageOwnedByAuthenticatedUser", - ], - }, - ], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - ], - getAllPackageVersionsForPackageOwnedByOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - ], - getAllPackageVersionsForPackageOwnedByUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions", - ], - getPackageForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}", - ], - getPackageForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}", - ], - getPackageForUser: [ - "GET 
/users/{username}/packages/{package_type}/{package_name}", - ], - getPackageVersionForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - getPackageVersionForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - getPackageVersionForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - restorePackageForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/restore{?token}", - ], - restorePackageForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}", - ], - restorePackageVersionForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", - ], - restorePackageVersionForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", - ], - }, - projects: { - addCollaborator: [ - "PUT /projects/{project_id}/collaborators/{username}", - { mediaType: { previews: ["inertia"] } }, - ], - createCard: [ - "POST /projects/columns/{column_id}/cards", - { mediaType: { previews: ["inertia"] } }, - ], - createColumn: [ - "POST /projects/{project_id}/columns", - { mediaType: { previews: ["inertia"] } }, - ], - createForAuthenticatedUser: [ - "POST /user/projects", - { mediaType: { previews: ["inertia"] } }, - ], - createForOrg: [ - "POST /orgs/{org}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - createForRepo: [ - "POST /repos/{owner}/{repo}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - delete: [ - "DELETE /projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - deleteCard: [ - "DELETE /projects/columns/cards/{card_id}", - { mediaType: { previews: ["inertia"] } }, - ], - deleteColumn: [ - "DELETE /projects/columns/{column_id}", - { mediaType: { previews: ["inertia"] } }, - ], - get: [ - "GET /projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - getCard: [ - "GET /projects/columns/cards/{card_id}", - { mediaType: { previews: ["inertia"] } }, - ], - getColumn: [ - "GET /projects/columns/{column_id}", - { mediaType: { previews: ["inertia"] } }, - ], - getPermissionForUser: [ - "GET /projects/{project_id}/collaborators/{username}/permission", - { mediaType: { previews: ["inertia"] } }, - ], - listCards: [ - "GET /projects/columns/{column_id}/cards", - { mediaType: { previews: ["inertia"] } }, - ], - listCollaborators: [ - "GET /projects/{project_id}/collaborators", - { mediaType: { previews: ["inertia"] } }, - ], - listColumns: [ - "GET /projects/{project_id}/columns", - { mediaType: { previews: ["inertia"] } }, - ], - listForOrg: [ - "GET /orgs/{org}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - listForRepo: [ - "GET /repos/{owner}/{repo}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - listForUser: [ - "GET /users/{username}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - moveCard: [ - "POST /projects/columns/cards/{card_id}/moves", - { mediaType: { previews: ["inertia"] } }, - ], - moveColumn: [ - "POST /projects/columns/{column_id}/moves", - { mediaType: { previews: ["inertia"] } }, - ], - removeCollaborator: [ - "DELETE /projects/{project_id}/collaborators/{username}", - { mediaType: { previews: ["inertia"] } }, - ], - update: [ - "PATCH /projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - updateCard: [ - "PATCH /projects/columns/cards/{card_id}", - { 
mediaType: { previews: ["inertia"] } }, - ], - updateColumn: [ - "PATCH /projects/columns/{column_id}", - { mediaType: { previews: ["inertia"] } }, - ], - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", - ], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", - ], - deletePendingReview: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", - ], - deleteReviewComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", - ], - dismissReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", - ], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", - ], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", - ], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", - ], - listReviewComments: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", - ], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", - ], - requestReviewers: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", - ], - submitReview: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", - ], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", - { mediaType: { previews: ["lydian"] } }, - ], - updateReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", - ], - updateReviewComment: [ - "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", - ], - }, - rateLimit: { get: ["GET /rate_limit"] }, - reactions: { - createForCommitComment: [ - "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForIssue: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForIssueComment: [ - "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForPullRequestReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForTeamDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForTeamDiscussionInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - 
deleteForCommitComment: [ - "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForIssue: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForIssueComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForPullRequestComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForTeamDiscussion: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForTeamDiscussionComment: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteLegacy: [ - "DELETE /reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - { - deprecated: "octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy", - }, - ], - listForCommitComment: [ - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForIssueComment: [ - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForPullRequestReviewComment: [ - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForTeamDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForTeamDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" }, - ], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" }, - ], - addTeamAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" }, - ], - addUserAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" }, - ], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: [ - "GET /repos/{owner}/{repo}/vulnerability-alerts", - { mediaType: { previews: ["dorian"] } }, - ], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: [ - "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments", - ], - createCommitSignatureProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", - { 
mediaType: { previews: ["zzzax"] } }, - ], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: [ - "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", - ], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}", - ], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: [ - "POST /repos/{owner}/{repo}/pages", - { mediaType: { previews: ["switcheroo"] } }, - ], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createUsingTemplate: [ - "POST /repos/{template_owner}/{template_repo}/generate", - { mediaType: { previews: ["baptiste"] } }, - ], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", - ], - deleteAdminBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", - ], - deleteAnEnvironment: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}", - ], - deleteBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection", - ], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", - { mediaType: { previews: ["zzzax"] } }, - ], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: [ - "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}", - ], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: [ - "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}", - ], - deletePagesSite: [ - "DELETE /repos/{owner}/{repo}/pages", - { mediaType: { previews: ["switcheroo"] } }, - ], - deletePullRequestReviewProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", - ], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: [ - "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}", - ], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: [ - "DELETE /repos/{owner}/{repo}/automated-security-fixes", - { mediaType: { previews: ["london"] } }, - ], - disableVulnerabilityAlerts: [ - "DELETE /repos/{owner}/{repo}/vulnerability-alerts", - { mediaType: { previews: ["dorian"] } }, - ], - downloadArchive: [ - "GET /repos/{owner}/{repo}/zipball/{ref}", - {}, - { renamed: ["repos", "downloadZipballArchive"] }, - ], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: [ - "PUT /repos/{owner}/{repo}/automated-security-fixes", - { mediaType: { previews: ["london"] } }, - ], - enableVulnerabilityAlerts: [ - "PUT /repos/{owner}/{repo}/vulnerability-alerts", - { mediaType: { previews: ["dorian"] } }, - ], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: [ - "GET 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", - ], - getAdminBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", - ], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - ], - getAllTopics: [ - "GET /repos/{owner}/{repo}/topics", - { mediaType: { previews: ["mercy"] } }, - ], - getAppsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - ], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection", - ], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: [ - "GET /repos/{owner}/{repo}/collaborators/{username}/permission", - ], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", - { mediaType: { previews: ["zzzax"] } }, - ], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", - ], - getEnvironment: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}", - ], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", - ], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - ], - getTeamsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - ], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - ], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: [ - "GET 
/repos/{owner}/{repo}/hooks/{hook_id}/config", - ], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", - { mediaType: { previews: ["groot"] } }, - ], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", - ], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: [ - "GET /repos/{owner}/{repo}/commits/{ref}/statuses", - ], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", - ], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", - { mediaType: { previews: ["groot"] } }, - ], - listReleaseAssets: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/assets", - ], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" }, - ], - removeCollaborator: [ - "DELETE /repos/{owner}/{repo}/collaborators/{username}", - ], - removeStatusCheckContexts: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" }, - ], - removeStatusCheckProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - ], - removeTeamAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" }, - ], - removeUserAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" }, - ], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: [ - "PUT /repos/{owner}/{repo}/topics", - { mediaType: { previews: ["mercy"] } }, - ], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", - ], - setAppAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" }, - ], - setStatusCheckContexts: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" }, - ], - setTeamAccessRestrictions: [ - "PUT 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" }, - ], - setUserAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" }, - ], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection", - ], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: [ - "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}", - ], - updatePullRequestReviewProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", - ], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: [ - "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}", - ], - updateStatusCheckPotection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - {}, - { renamed: ["repos", "updateStatusCheckProtection"] }, - ], - updateStatusCheckProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - ], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: [ - "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config", - ], - uploadReleaseAsset: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", - { baseUrl: "https://uploads.github.com" }, - ], - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits", { mediaType: { previews: ["cloak"] } }], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics", { mediaType: { previews: ["mercy"] } }], - users: ["GET /search/users"], - }, - secretScanning: { - getAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", - ], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", - ], - }, - teams: { - addOrUpdateMembershipForUserInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", - ], - addOrUpdateProjectPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - addOrUpdateRepoPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", - ], - checkPermissionsForProjectInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - checkPermissionsForRepoInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", - ], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", - ], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", - ], - deleteDiscussionInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", - ], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: [ - "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", - ], - getDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", - ], - getMembershipForUserInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/memberships/{username}", - ], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", - ], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/invitations", - ], - listProjectsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", - ], - removeProjectInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}", - ], - removeRepoInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", - ], - updateDiscussionCommentInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", - ], - updateDiscussionInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", - ], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"], - }, - users: { - addEmailForAuthenticated: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"], - }, -}; -export default Endpoints; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js 
b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js
deleted file mode 100644
index cb0ff5c3..00000000
--- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js
+++ /dev/null
@@ -1 +0,0 @@
-export {};
diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js
deleted file mode 100644
index cb0ff5c3..00000000
--- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js
+++ /dev/null
@@ -1 +0,0 @@
-export {};
diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js
deleted file mode 100644
index 41399060..00000000
--- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js
+++ /dev/null
@@ -1,11 +0,0 @@
-import ENDPOINTS from "./generated/endpoints";
-import { VERSION } from "./version";
-import { endpointsToMethods } from "./endpoints-to-methods";
-export function restEndpointMethods(octokit) {
-    const api = endpointsToMethods(octokit, ENDPOINTS);
-    return {
-        ...api,
-        rest: api,
-    };
-}
-restEndpointMethods.VERSION = VERSION;
diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js
deleted file mode 100644
index cb0ff5c3..00000000
--- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/types.js
+++ /dev/null
@@ -1 +0,0 @@
-export {};
diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js
deleted file mode 100644
index 5e3bbc5f..00000000
--- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js
+++ /dev/null
@@ -1 +0,0 @@
-export const VERSION = "4.15.1";
diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts
deleted file mode 100644
index 2a97a4b4..00000000
--- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { Octokit } from "@octokit/core";
-import { EndpointsDefaultsAndDecorations } from "./types";
-import { RestEndpointMethods } from "./generated/method-types";
-export declare function endpointsToMethods(octokit: Octokit, endpointsMap: EndpointsDefaultsAndDecorations): RestEndpointMethods;
diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts
deleted file mode 100644
index a3c1d92a..00000000
---
a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { EndpointsDefaultsAndDecorations } from "../types"; -declare const Endpoints: EndpointsDefaultsAndDecorations; -export default Endpoints; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts deleted file mode 100644 index fb0644af..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts +++ /dev/null @@ -1,7826 +0,0 @@ -import { EndpointInterface, RequestInterface } from "@octokit/types"; -import { RestEndpointMethodTypes } from "./parameters-and-response-types"; -export declare type RestEndpointMethods = { - actions: { - /** - * Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. - */ - addSelectedRepoToOrgSecret: { - (params?: RestEndpointMethodTypes["actions"]["addSelectedRepoToOrgSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - cancelWorkflowRun: { - (params?: RestEndpointMethodTypes["actions"]["cancelWorkflowRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates or updates an environment secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use - * this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. - * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. 
- * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. - * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - createOrUpdateEnvironmentSecret: { - (params?: RestEndpointMethodTypes["actions"]["createOrUpdateEnvironmentSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates or updates an organization secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to - * use this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. - * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. 
- * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. - * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - createOrUpdateOrgSecret: { - (params?: RestEndpointMethodTypes["actions"]["createOrUpdateOrgSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates or updates a repository secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use - * this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. - * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. 
- * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. - * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - createOrUpdateRepoSecret: { - (params?: RestEndpointMethodTypes["actions"]["createOrUpdateRepoSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/octo-org --token TOKEN - * ``` - */ - createRegistrationTokenForOrg: { - (params?: RestEndpointMethodTypes["actions"]["createRegistrationTokenForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate - * using an access token with the `repo` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN - * ``` - */ - createRegistrationTokenForRepo: { - (params?: RestEndpointMethodTypes["actions"]["createRegistrationTokenForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
- * - * #### Example using remove token - * - * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this - * endpoint. - * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - createRemoveTokenForOrg: { - (params?: RestEndpointMethodTypes["actions"]["createRemoveTokenForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. - * You must authenticate using an access token with the `repo` scope to use this endpoint. - * - * #### Example using remove token - * - * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. - * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - createRemoveTokenForRepo: { - (params?: RestEndpointMethodTypes["actions"]["createRemoveTokenForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)." - */ - createWorkflowDispatch: { - (params?: RestEndpointMethodTypes["actions"]["createWorkflowDispatch"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - deleteArtifact: { - (params?: RestEndpointMethodTypes["actions"]["deleteArtifact"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - deleteEnvironmentSecret: { - (params?: RestEndpointMethodTypes["actions"]["deleteEnvironmentSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. 
- */ - deleteOrgSecret: { - (params?: RestEndpointMethodTypes["actions"]["deleteOrgSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - deleteRepoSecret: { - (params?: RestEndpointMethodTypes["actions"]["deleteRepoSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - deleteSelfHostedRunnerFromOrg: { - (params?: RestEndpointMethodTypes["actions"]["deleteSelfHostedRunnerFromOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. - * - * You must authenticate using an access token with the `repo` - * scope to use this endpoint. - */ - deleteSelfHostedRunnerFromRepo: { - (params?: RestEndpointMethodTypes["actions"]["deleteSelfHostedRunnerFromRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is - * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use - * this endpoint. - */ - deleteWorkflowRun: { - (params?: RestEndpointMethodTypes["actions"]["deleteWorkflowRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - deleteWorkflowRunLogs: { - (params?: RestEndpointMethodTypes["actions"]["deleteWorkflowRunLogs"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
- */ - disableSelectedRepositoryGithubActionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["disableSelectedRepositoryGithubActionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - disableWorkflow: { - (params?: RestEndpointMethodTypes["actions"]["disableWorkflow"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in - * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to - * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. - * GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - downloadArtifact: { - (params?: RestEndpointMethodTypes["actions"]["downloadArtifact"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look - * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can - * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must - * have the `actions:read` permission to use this endpoint. - */ - downloadJobLogsForWorkflowRun: { - (params?: RestEndpointMethodTypes["actions"]["downloadJobLogsForWorkflowRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for - * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use - * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have - * the `actions:read` permission to use this endpoint. - */ - downloadWorkflowRunLogs: { - (params?: RestEndpointMethodTypes["actions"]["downloadWorkflowRunLogs"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
- */ - enableSelectedRepositoryGithubActionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["enableSelectedRepositoryGithubActionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - enableWorkflow: { - (params?: RestEndpointMethodTypes["actions"]["enableWorkflow"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the selected actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."" - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - getAllowedActionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["getAllowedActionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the settings for selected actions that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - getAllowedActionsRepository: { - (params?: RestEndpointMethodTypes["actions"]["getAllowedActionsRepository"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getArtifact: { - (params?: RestEndpointMethodTypes["actions"]["getArtifact"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - getEnvironmentPublicKey: { - (params?: RestEndpointMethodTypes["actions"]["getEnvironmentPublicKey"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a single environment secret without revealing its encrypted value. 
You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - getEnvironmentSecret: { - (params?: RestEndpointMethodTypes["actions"]["getEnvironmentSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - getGithubActionsPermissionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["getGithubActionsPermissionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions allowed to run in the repository. - * - * You must authenticate using an access token with the `repo` scope to use this - * endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - getGithubActionsPermissionsRepository: { - (params?: RestEndpointMethodTypes["actions"]["getGithubActionsPermissionsRepository"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getJobForWorkflowRun: { - (params?: RestEndpointMethodTypes["actions"]["getJobForWorkflowRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. - */ - getOrgPublicKey: { - (params?: RestEndpointMethodTypes["actions"]["getOrgPublicKey"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. - */ - getOrgSecret: { - (params?: RestEndpointMethodTypes["actions"]["getOrgSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get all deployment environments for a workflow run that are waiting for protection rules to pass. - * - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
- */ - getPendingDeploymentsForRun: { - (params?: RestEndpointMethodTypes["actions"]["getPendingDeploymentsForRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions allowed to run in the repository. - * - * You must authenticate using an access token with the `repo` scope to use this - * endpoint. GitHub Apps must have the `administration` repository permission to use this API. - * @deprecated octokit.rest.actions.getRepoPermissions() has been renamed to octokit.rest.actions.getGithubActionsPermissionsRepository() (2020-11-10) - */ - getRepoPermissions: { - (params?: RestEndpointMethodTypes["actions"]["getRepoPermissions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - getRepoPublicKey: { - (params?: RestEndpointMethodTypes["actions"]["getRepoPublicKey"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - getRepoSecret: { - (params?: RestEndpointMethodTypes["actions"]["getRepoSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getReviewsForRun: { - (params?: RestEndpointMethodTypes["actions"]["getReviewsForRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific self-hosted runner configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - getSelfHostedRunnerForOrg: { - (params?: RestEndpointMethodTypes["actions"]["getSelfHostedRunnerForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific self-hosted runner configured in a repository. - * - * You must authenticate using an access token with the `repo` scope to use this - * endpoint. - */ - getSelfHostedRunnerForRepo: { - (params?: RestEndpointMethodTypes["actions"]["getSelfHostedRunnerForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. 
If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getWorkflow: { - (params?: RestEndpointMethodTypes["actions"]["getWorkflow"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getWorkflowRun: { - (params?: RestEndpointMethodTypes["actions"]["getWorkflowRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getWorkflowRunUsage: { - (params?: RestEndpointMethodTypes["actions"]["getWorkflowRunUsage"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getWorkflowUsage: { - (params?: RestEndpointMethodTypes["actions"]["getWorkflowUsage"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
- */ - listArtifactsForRepo: { - (params?: RestEndpointMethodTypes["actions"]["listArtifactsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - listEnvironmentSecrets: { - (params?: RestEndpointMethodTypes["actions"]["listEnvironmentSecrets"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). - */ - listJobsForWorkflowRun: { - (params?: RestEndpointMethodTypes["actions"]["listJobsForWorkflowRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. - */ - listOrgSecrets: { - (params?: RestEndpointMethodTypes["actions"]["listOrgSecrets"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. - */ - listRepoSecrets: { - (params?: RestEndpointMethodTypes["actions"]["listRepoSecrets"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - listRepoWorkflows: { - (params?: RestEndpointMethodTypes["actions"]["listRepoWorkflows"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - listRunnerApplicationsForOrg: { - (params?: RestEndpointMethodTypes["actions"]["listRunnerApplicationsForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. 
- */ - listRunnerApplicationsForRepo: { - (params?: RestEndpointMethodTypes["actions"]["listRunnerApplicationsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. - */ - listSelectedReposForOrgSecret: { - (params?: RestEndpointMethodTypes["actions"]["listSelectedReposForOrgSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - listSelectedRepositoriesEnabledGithubActionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["listSelectedRepositoriesEnabledGithubActionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all self-hosted runners configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - listSelfHostedRunnersForOrg: { - (params?: RestEndpointMethodTypes["actions"]["listSelfHostedRunnersForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. - */ - listSelfHostedRunnersForRepo: { - (params?: RestEndpointMethodTypes["actions"]["listSelfHostedRunnersForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - listWorkflowRunArtifacts: { - (params?: RestEndpointMethodTypes["actions"]["listWorkflowRunArtifacts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. 
- */ - listWorkflowRuns: { - (params?: RestEndpointMethodTypes["actions"]["listWorkflowRuns"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - listWorkflowRunsForRepo: { - (params?: RestEndpointMethodTypes["actions"]["listWorkflowRunsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - reRunWorkflow: { - (params?: RestEndpointMethodTypes["actions"]["reRunWorkflow"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. - */ - removeSelectedRepoFromOrgSecret: { - (params?: RestEndpointMethodTypes["actions"]["removeSelectedRepoFromOrgSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Approve or reject pending deployments that are waiting on approval by a required reviewer. - * - * Anyone with read access to the repository contents and deployments can use this endpoint. - */ - reviewPendingDeploymentsForRun: { - (params?: RestEndpointMethodTypes["actions"]["reviewPendingDeploymentsForRun"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Sets the actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * If the organization belongs to an enterprise that has `selected` actions set at the enterprise level, then you cannot override any of the enterprise's allowed actions settings. - * - * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
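
The generated method types above surface on an Octokit instance at runtime; as a minimal sketch (assuming `@octokit/rest`, a `GITHUB_TOKEN` environment variable, and illustrative owner/repo arguments not taken from this patch), the workflow-run helpers declared here can be called like this:

    import { Octokit } from "@octokit/rest";

    async function rerunLatestFailedRun(owner: string, repo: string): Promise<void> {
      const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
      // List recent runs for the repository, then pick the newest failed one.
      const { data } = await octokit.actions.listWorkflowRunsForRepo({ owner, repo, per_page: 20 });
      const failed = data.workflow_runs.find((run) => run.conclusion === "failure");
      if (failed) {
        // Re-run by id; per the description above this needs the `repo` scope or `actions:write`.
        await octokit.actions.reRunWorkflow({ owner, repo, run_id: failed.id });
      }
    }
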
- */ - setAllowedActionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["setAllowedActionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Sets the actions that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." - * - * If the repository belongs to an organization or enterprise that has `selected` actions set at the organization or enterprise levels, then you cannot override any of the allowed actions settings. - * - * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - setAllowedActionsRepository: { - (params?: RestEndpointMethodTypes["actions"]["setAllowedActionsRepository"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Sets the GitHub Actions permissions policy for repositories and allowed actions in an organization. - * - * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions, then you cannot override them for the organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - setGithubActionsPermissionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["setGithubActionsPermissionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions in the repository. - * - * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions, then you cannot override them for the repository. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - setGithubActionsPermissionsRepository: { - (params?: RestEndpointMethodTypes["actions"]["setGithubActionsPermissionsRepository"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. 
- */ - setSelectedReposForOrgSecret: { - (params?: RestEndpointMethodTypes["actions"]["setSelectedReposForOrgSecret"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - setSelectedRepositoriesEnabledGithubActionsOrganization: { - (params?: RestEndpointMethodTypes["actions"]["setSelectedRepositoriesEnabledGithubActionsOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - activity: { - checkRepoIsStarredByAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["checkRepoIsStarredByAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). - */ - deleteRepoSubscription: { - (params?: RestEndpointMethodTypes["activity"]["deleteRepoSubscription"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. - */ - deleteThreadSubscription: { - (params?: RestEndpointMethodTypes["activity"]["deleteThreadSubscription"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: - * - * * **Timeline**: The GitHub global public timeline - * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) - * * **Current user public**: The public timeline for the authenticated user - * * **Current user**: The private timeline for the authenticated user - * * **Current user actor**: The private timeline for activity created by the authenticated user - * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. - * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. 
- * - * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. - */ - getFeeds: { - (params?: RestEndpointMethodTypes["activity"]["getFeeds"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getRepoSubscription: { - (params?: RestEndpointMethodTypes["activity"]["getRepoSubscription"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getThread: { - (params?: RestEndpointMethodTypes["activity"]["getThread"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). - * - * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. - */ - getThreadSubscriptionForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["getThreadSubscriptionForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. - */ - listEventsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["listEventsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all notifications for the current user, sorted by most recently updated. - */ - listNotificationsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["listNotificationsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This is the user's organization dashboard. You must be authenticated as the user to view this. - */ - listOrgEventsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["listOrgEventsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. 
- */ - listPublicEvents: { - (params?: RestEndpointMethodTypes["activity"]["listPublicEvents"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listPublicEventsForRepoNetwork: { - (params?: RestEndpointMethodTypes["activity"]["listPublicEventsForRepoNetwork"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listPublicEventsForUser: { - (params?: RestEndpointMethodTypes["activity"]["listPublicEventsForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listPublicOrgEvents: { - (params?: RestEndpointMethodTypes["activity"]["listPublicOrgEvents"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. - */ - listReceivedEventsForUser: { - (params?: RestEndpointMethodTypes["activity"]["listReceivedEventsForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listReceivedPublicEventsForUser: { - (params?: RestEndpointMethodTypes["activity"]["listReceivedPublicEventsForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listRepoEvents: { - (params?: RestEndpointMethodTypes["activity"]["listRepoEvents"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all notifications for the current user. - */ - listRepoNotificationsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["listRepoNotificationsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists repositories the authenticated user has starred. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - listReposStarredByAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["listReposStarredByAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists repositories a user has starred. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - listReposStarredByUser: { - (params?: RestEndpointMethodTypes["activity"]["listReposStarredByUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists repositories a user is watching. - */ - listReposWatchedByUser: { - (params?: RestEndpointMethodTypes["activity"]["listReposWatchedByUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the people that have starred the repository. 
- * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - listStargazersForRepo: { - (params?: RestEndpointMethodTypes["activity"]["listStargazersForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists repositories the authenticated user is watching. - */ - listWatchedReposForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["listWatchedReposForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the people watching the specified repository. - */ - listWatchersForRepo: { - (params?: RestEndpointMethodTypes["activity"]["listWatchersForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Marks all notifications as "read" removes it from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. - */ - markNotificationsAsRead: { - (params?: RestEndpointMethodTypes["activity"]["markNotificationsAsRead"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Marks all notifications in a repository as "read" removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. - */ - markRepoNotificationsAsRead: { - (params?: RestEndpointMethodTypes["activity"]["markRepoNotificationsAsRead"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - markThreadAsRead: { - (params?: RestEndpointMethodTypes["activity"]["markThreadAsRead"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. 
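
A short sketch of the notification helpers described above (list, then mark as read), again assuming `@octokit/rest` and a token with the `notifications` or `repo` scope; the token variable is illustrative:

    import { Octokit } from "@octokit/rest";

    async function clearInbox(): Promise<void> {
      const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
      // Unread notification threads for the authenticated user, most recently updated first.
      const { data: threads } = await octokit.activity.listNotificationsForAuthenticatedUser({ per_page: 50 });
      console.log(`unread threads: ${threads.length}`);
      // Mark everything up to "now" as read; as noted above, GitHub may answer 202 and finish asynchronously.
      await octokit.activity.markNotificationsAsRead({ last_read_at: new Date().toISOString() });
    }
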
- */ - setRepoSubscription: { - (params?: RestEndpointMethodTypes["activity"]["setRepoSubscription"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. - * - * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribed to threads that you have previously ignored. - * - * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. - */ - setThreadSubscription: { - (params?: RestEndpointMethodTypes["activity"]["setThreadSubscription"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - starRepoForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["starRepoForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - unstarRepoForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["activity"]["unstarRepoForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - apps: { - /** - * Add a single repository to an installation. The authenticated user must have admin access to the repository. - * - * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. - */ - addRepoToInstallation: { - (params?: RestEndpointMethodTypes["apps"]["addRepoToInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. - */ - checkToken: { - (params?: RestEndpointMethodTypes["apps"]["checkToken"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. 
- * - * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - createContentAttachment: { - (params?: RestEndpointMethodTypes["apps"]["createContentAttachment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. - */ - createFromManifest: { - (params?: RestEndpointMethodTypes["apps"]["createFromManifest"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - createInstallationAccessToken: { - (params?: RestEndpointMethodTypes["apps"]["createInstallationAccessToken"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. - * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). - */ - deleteAuthorization: { - (params?: RestEndpointMethodTypes["apps"]["deleteAuthorization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Uninstalls a GitHub App on a user, organization, or business account. 
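
For `createInstallationAccessToken` above, the JWT requirement is typically satisfied with an app-auth strategy; a sketch assuming `@octokit/auth-app`, where the app id, private key source, and installation id are placeholders:

    import { Octokit } from "@octokit/rest";
    import { createAppAuth } from "@octokit/auth-app";

    async function tokenForInstallation(installationId: number): Promise<string> {
      // Authenticate as the GitHub App itself (JWT) in order to mint an installation token.
      const appOctokit = new Octokit({
        authStrategy: createAppAuth,
        auth: { appId: 12345, privateKey: process.env.APP_PRIVATE_KEY! },
      });
      const { data } = await appOctokit.apps.createInstallationAccessToken({
        installation_id: installationId,
      });
      return data.token; // expires roughly one hour after creation, per the description above
    }
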
If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - deleteInstallation: { - (params?: RestEndpointMethodTypes["apps"]["deleteInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. - */ - deleteToken: { - (params?: RestEndpointMethodTypes["apps"]["deleteToken"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - getAuthenticated: { - (params?: RestEndpointMethodTypes["apps"]["getAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). - * - * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - getBySlug: { - (params?: RestEndpointMethodTypes["apps"]["getBySlug"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables an authenticated GitHub App to find an installation's information using the installation id. The installation's account type (`target_type`) will be either an organization or a user account, depending which account the repository belongs to. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - getInstallation: { - (params?: RestEndpointMethodTypes["apps"]["getInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables an authenticated GitHub App to find the organization's installation information. 
- * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - getOrgInstallation: { - (params?: RestEndpointMethodTypes["apps"]["getOrgInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - getRepoInstallation: { - (params?: RestEndpointMethodTypes["apps"]["getRepoInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - getSubscriptionPlanForAccount: { - (params?: RestEndpointMethodTypes["apps"]["getSubscriptionPlanForAccount"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - getSubscriptionPlanForAccountStubbed: { - (params?: RestEndpointMethodTypes["apps"]["getSubscriptionPlanForAccountStubbed"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables an authenticated GitHub App to find the user’s installation information. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - getUserInstallation: { - (params?: RestEndpointMethodTypes["apps"]["getUserInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." 
- * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - getWebhookConfigForApp: { - (params?: RestEndpointMethodTypes["apps"]["getWebhookConfigForApp"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - listAccountsForPlan: { - (params?: RestEndpointMethodTypes["apps"]["listAccountsForPlan"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - listAccountsForPlanStubbed: { - (params?: RestEndpointMethodTypes["apps"]["listAccountsForPlanStubbed"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - * - * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. - * - * The access the user has to each repository is included in the hash under the `permissions` key. - */ - listInstallationReposForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["apps"]["listInstallationReposForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
- * - * The permissions the installation has are included under the `permissions` key. - */ - listInstallations: { - (params?: RestEndpointMethodTypes["apps"]["listInstallations"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. - * - * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - * - * You can find the permissions for the installation under the `permissions` key. - */ - listInstallationsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["apps"]["listInstallationsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all plans that are part of your GitHub Marketplace listing. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - listPlans: { - (params?: RestEndpointMethodTypes["apps"]["listPlans"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all plans that are part of your GitHub Marketplace listing. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - listPlansStubbed: { - (params?: RestEndpointMethodTypes["apps"]["listPlansStubbed"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List repositories that an app installation can access. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - listReposAccessibleToInstallation: { - (params?: RestEndpointMethodTypes["apps"]["listReposAccessibleToInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . 
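
Once an installation access token exists, the installation-scoped listing above works with a plain token-authenticated client; a sketch with an illustrative token parameter:

    import { Octokit } from "@octokit/rest";

    async function reposVisibleToInstallation(installationToken: string): Promise<string[]> {
      const octokit = new Octokit({ auth: installationToken });
      // Repositories the installation can access, one page at a time.
      const { data } = await octokit.apps.listReposAccessibleToInstallation({ per_page: 100 });
      return data.repositories.map((repo) => repo.full_name);
    }
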
OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). - */ - listSubscriptionsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["apps"]["listSubscriptionsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). - */ - listSubscriptionsForAuthenticatedUserStubbed: { - (params?: RestEndpointMethodTypes["apps"]["listSubscriptionsForAuthenticatedUserStubbed"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Remove a single repository from an installation. The authenticated user must have admin access to the repository. - * - * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. - */ - removeRepoFromInstallation: { - (params?: RestEndpointMethodTypes["apps"]["removeRepoFromInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. - */ - resetToken: { - (params?: RestEndpointMethodTypes["apps"]["resetToken"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Revokes the installation token you're using to authenticate as an installation and access this endpoint. - * - * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. 
- */ - revokeInstallationAccessToken: { - (params?: RestEndpointMethodTypes["apps"]["revokeInstallationAccessToken"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. - */ - scopeToken: { - (params?: RestEndpointMethodTypes["apps"]["scopeToken"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - suspendInstallation: { - (params?: RestEndpointMethodTypes["apps"]["suspendInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes a GitHub App installation suspension. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - unsuspendInstallation: { - (params?: RestEndpointMethodTypes["apps"]["unsuspendInstallation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - updateWebhookConfigForApp: { - (params?: RestEndpointMethodTypes["apps"]["updateWebhookConfigForApp"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - billing: { - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Access tokens must have the `repo` or `admin:org` scope. 
- */ - getGithubActionsBillingOrg: { - (params?: RestEndpointMethodTypes["billing"]["getGithubActionsBillingOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Access tokens must have the `user` scope. - */ - getGithubActionsBillingUser: { - (params?: RestEndpointMethodTypes["billing"]["getGithubActionsBillingUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the free and paid storage usued for GitHub Packages in gigabytes. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - getGithubPackagesBillingOrg: { - (params?: RestEndpointMethodTypes["billing"]["getGithubPackagesBillingOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the free and paid storage used for GitHub Packages in gigabytes. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `user` scope. - */ - getGithubPackagesBillingUser: { - (params?: RestEndpointMethodTypes["billing"]["getGithubPackagesBillingUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and Github Packages. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - getSharedStorageBillingOrg: { - (params?: RestEndpointMethodTypes["billing"]["getSharedStorageBillingOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and Github Packages. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `user` scope. 
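
The billing getters above only read usage data; a sketch for the organization-level Actions summary, with the org name as a placeholder and the response logged as-is since the exact response fields are not shown in this excerpt:

    import { Octokit } from "@octokit/rest";

    async function actionsMinutesForOrg(org: string): Promise<void> {
      // The token needs the `repo` or `admin:org` scope, per the description above.
      const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
      const { data } = await octokit.billing.getGithubActionsBillingOrg({ org });
      console.log(data); // free vs. paid minutes summary
    }
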
- */ - getSharedStorageBillingUser: { - (params?: RestEndpointMethodTypes["billing"]["getSharedStorageBillingUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - checks: { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. - * - * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. - */ - create: { - (params?: RestEndpointMethodTypes["checks"]["create"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. - */ - createSuite: { - (params?: RestEndpointMethodTypes["checks"]["createSuite"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. - */ - get: { - (params?: RestEndpointMethodTypes["checks"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. - */ - getSuite: { - (params?: RestEndpointMethodTypes["checks"]["getSuite"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists annotations for a check run using the annotation `id`. 
GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. - */ - listAnnotations: { - (params?: RestEndpointMethodTypes["checks"]["listAnnotations"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. - */ - listForRef: { - (params?: RestEndpointMethodTypes["checks"]["listForRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. - */ - listForSuite: { - (params?: RestEndpointMethodTypes["checks"]["listForSuite"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. - */ - listSuitesForRef: { - (params?: RestEndpointMethodTypes["checks"]["listSuitesForRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. - * - * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. 
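
A sketch of the check-run lookups above for a single commit ref, assuming `@octokit/rest`; the owner, repo, and ref values are placeholders:

    import { Octokit } from "@octokit/rest";

    async function failedCheckRuns(owner: string, repo: string, ref: string): Promise<string[]> {
      const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
      // As noted above, `ref` may be a SHA, a branch name, or a tag name.
      const { data } = await octokit.checks.listForRef({ owner, repo, ref, per_page: 100 });
      return data.check_runs
        .filter((run) => run.conclusion === "failure")
        .map((run) => run.name);
    }
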
- */ - rerequestSuite: { - (params?: RestEndpointMethodTypes["checks"]["rerequestSuite"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. - */ - setSuitesPreferences: { - (params?: RestEndpointMethodTypes["checks"]["setSuitesPreferences"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. - */ - update: { - (params?: RestEndpointMethodTypes["checks"]["update"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - codeScanning: { - /** - * Deletes a specified code scanning analysis from a repository. For - * private repositories, you must use an access token with the `repo` scope. For public repositories, - * you must use an access token with `public_repo` and `repo:security_events` scopes. - * GitHub Apps must have the `security_events` write permission to use this endpoint. - * - * You can delete one analysis at a time. - * To delete a series of analyses, start with the most recent analysis and work backwards. - * Conceptually, the process is similar to the undo function in a text editor. - * - * When you list the analyses for a repository, - * one or more will be identified as deletable in the response: - * - * ``` - * "deletable": true - * ``` - * - * An analysis is deletable when it's the most recent in a set of analyses. - * Typically, a repository will have multiple sets of analyses - * for each enabled code scanning tool, - * where a set is determined by a unique combination of analysis values: - * - * * `ref` - * * `tool` - * * `analysis_key` - * * `environment` - * - * If you attempt to delete an analysis that is not the most recent in a set, - * you'll get a 400 response with the message: - * - * ``` - * Analysis specified is not deletable. - * ``` - * - * The response from a successful `DELETE` operation provides you with - * two alternative URLs for deleting the next analysis in the set - * (see the example default response below). - * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis - * in the set. This is a useful option if you want to preserve at least one analysis - * for the specified tool in your repository. - * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. - * When you delete the last analysis in a set the value of `next_analysis_url` and `confirm_delete_url` - * in the 200 response is `null`. - * - * As an example of the deletion process, - * let's imagine that you added a workflow that configured a particular code scanning tool - * to analyze the code in a repository. 
This tool has added 15 analyses: - * 10 on the default branch, and another 5 on a topic branch. - * You therefore have two separate sets of analyses for this tool. - * You've now decided that you want to remove all of the analyses for the tool. - * To do this you must make 15 separate deletion requests. - * To start, you must find the deletable analysis for one of the sets, - * step through deleting the analyses in that set, - * and then repeat the process for the second set. - * The procedure therefore consists of a nested loop: - * - * **Outer loop**: - * * List the analyses for the repository, filtered by tool. - * * Parse this list to find a deletable analysis. If found: - * - * **Inner loop**: - * * Delete the identified analysis. - * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. - * - * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. - */ - deleteAnalysis: { - (params?: RestEndpointMethodTypes["codeScanning"]["deleteAnalysis"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * **Deprecation notice**: - * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. - */ - getAlert: { - (params?: RestEndpointMethodTypes["codeScanning"]["getAlert"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specified code scanning analysis for a repository. - * You must use an access token with the `security_events` scope to use this endpoint. - * GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * The default JSON response contains fields that describe the analysis. - * This includes the Git reference and commit SHA to which the analysis relates, - * the datetime of the analysis, the name of the code scanning tool, - * and the number of alerts. - * - * The `rules_count` field in the default response give the number of rules - * that were run in the analysis. - * For very old analyses this data is not available, - * and `0` is returned in this field. - * - * If you use the Accept header `application/sarif+json`, - * the response contains the analysis data that was uploaded. - * This is formatted as - * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). - * For an example response, see "[Custom media type for code scanning](#custom-media-type-for-code-scanning)." - * - * **Deprecation notice**: - * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. 
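A rough sketch of the nested deletion loop described above for `deleteAnalysis`, assuming an authenticated `octokit` client and that every analysis for one tool should be removed via `confirm_delete_url`. The helper name `purgeToolAnalyses` and the token source are made up for illustration.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

// Hypothetical helper: delete every analysis for one tool in one repository.
export async function purgeToolAnalyses(owner: string, repo: string, toolName: string) {
  // Outer loop: list the repository's analyses for the tool and look for a deletable one.
  for (;;) {
    const { data: analyses } = await octokit.rest.codeScanning.listRecentAnalyses({
      owner,
      repo,
      tool_name: toolName,
    });
    const deletable = analyses.find((analysis) => analysis.deletable);
    if (!deletable) return;

    // Inner loop: delete it, then keep following confirm_delete_url until it is null,
    // which removes the remaining analyses in the same set.
    let { data: result } = await octokit.rest.codeScanning.deleteAnalysis({
      owner,
      repo,
      analysis_id: deletable.id,
    });
    while (result.confirm_delete_url) {
      ({ data: result } = await octokit.request(`DELETE ${result.confirm_delete_url}`));
    }
  }
}
```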
- */ - getAnalysis: { - (params?: RestEndpointMethodTypes["codeScanning"]["getAnalysis"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. - */ - getSarif: { - (params?: RestEndpointMethodTypes["codeScanning"]["getSarif"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all open code scanning alerts for the default branch (usually `main` - * or `master`). You must use an access token with the `security_events` scope to use - * this endpoint. GitHub Apps must have the `security_events` read permission to use - * this endpoint. - * - * The response includes a `most_recent_instance` object. - * This provides details of the most recent instance of this alert - * for the default branch or for the specified Git reference - * (if you used `ref` in the request). - */ - listAlertsForRepo: { - (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all instances of the specified code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. - */ - listAlertsInstances: { - (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsInstances"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the details of all code scanning analyses for a repository, - * starting with the most recent. - * The response is paginated and you can use the `page` and `per_page` parameters - * to list the analyses you're interested in. - * By default 30 analyses are listed per page. - * - * The `rules_count` field in the response give the number of rules - * that were run in the analysis. - * For very old analyses this data is not available, - * and `0` is returned in this field. - * - * You must use an access token with the `security_events` scope to use this endpoint. - * GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * **Deprecation notice**: - * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. - */ - listRecentAnalyses: { - (params?: RestEndpointMethodTypes["codeScanning"]["listRecentAnalyses"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. 
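By way of example, listing open alerts and dismissing one with the `listAlertsForRepo` and `updateAlert` methods described above. The repository coordinates, the choice of alert, and the dismissal reason are placeholders for illustration only.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function dismissFirstOpenAlert(owner: string, repo: string) {
  // Open alerts on the default branch (or a specific ref, if one is passed).
  const { data: alerts } = await octokit.rest.codeScanning.listAlertsForRepo({
    owner,
    repo,
    state: "open",
  });
  if (alerts.length === 0) return;

  // Dismissing requires a reason alongside the new state.
  await octokit.rest.codeScanning.updateAlert({
    owner,
    repo,
    alert_number: alerts[0].number,
    state: "dismissed",
    dismissed_reason: "false positive",
  });
}
```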
- */ - updateAlert: { - (params?: RestEndpointMethodTypes["codeScanning"]["updateAlert"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. - * - * There are two places where you can upload code scanning results. - * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/github/finding-security-vulnerabilities-and-errors-in-your-code/triaging-code-scanning-alerts-in-pull-requests)." - * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/github/finding-security-vulnerabilities-and-errors-in-your-code/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." - * - * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: - * - * ``` - * gzip -c analysis-data.sarif | base64 - * ``` - * - * SARIF upload supports a maximum of 1000 results per analysis run. Any results over this limit are ignored. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. - * - * The `202 Accepted`, response includes an `id` value. - * You can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint. - * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." - */ - uploadSarif: { - (params?: RestEndpointMethodTypes["codeScanning"]["uploadSarif"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - codesOfConduct: { - getAllCodesOfConduct: { - (params?: RestEndpointMethodTypes["codesOfConduct"]["getAllCodesOfConduct"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getConductCode: { - (params?: RestEndpointMethodTypes["codesOfConduct"]["getConductCode"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the contents of the repository's code of conduct file, if one is detected. - * - * A code of conduct is detected if there is a file named `CODE_OF_CONDUCT` in the root directory of the repository. GitHub detects which code of conduct it is using fuzzy matching. - */ - getForRepo: { - (params?: RestEndpointMethodTypes["codesOfConduct"]["getForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - emojis: { - /** - * Lists all the emojis available to use on GitHub. 
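A sketch of the SARIF upload flow described earlier in this section, doing the gzip-and-base64 step in TypeScript rather than the shell pipeline shown above. The file name, branch ref, and commit SHA are placeholders.

```typescript
import { readFileSync } from "fs";
import { gzipSync } from "zlib";
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function uploadAnalysis(owner: string, repo: string, commitSha: string) {
  // Equivalent of `gzip -c analysis-data.sarif | base64`.
  const sarif = gzipSync(readFileSync("analysis-data.sarif")).toString("base64");

  const { data } = await octokit.rest.codeScanning.uploadSarif({
    owner,
    repo,
    commit_sha: commitSha,
    ref: "refs/heads/my-branch", // or e.g. refs/pull/42/merge for a pull request check
    sarif,
  });
  // The 202 response includes an id for polling the processing status.
  console.log(`SARIF upload id: ${data.id}`);
}
```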
- */ - get: { - (params?: RestEndpointMethodTypes["emojis"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - enterpriseAdmin: { - /** - * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - disableSelectedOrganizationGithubActionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["disableSelectedOrganizationGithubActionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - enableSelectedOrganizationGithubActionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["enableSelectedOrganizationGithubActionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the selected actions that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - getAllowedActionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["getAllowedActionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - getGithubActionsPermissionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["getGithubActionsPermissionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. 
- */ - listSelectedOrganizationsEnabledGithubActionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["listSelectedOrganizationsEnabledGithubActionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Sets the actions that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - setAllowedActionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["setAllowedActionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Sets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - setGithubActionsPermissionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["setGithubActionsPermissionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - setSelectedOrganizationsEnabledGithubActionsEnterprise: { - (params?: RestEndpointMethodTypes["enterpriseAdmin"]["setSelectedOrganizationsEnabledGithubActionsEnterprise"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - gists: { - checkIsStarred: { - (params?: RestEndpointMethodTypes["gists"]["checkIsStarred"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Allows you to add a new gist with one or more files. - * - * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. - */ - create: { - (params?: RestEndpointMethodTypes["gists"]["create"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - createComment: { - (params?: RestEndpointMethodTypes["gists"]["createComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - delete: { - (params?: RestEndpointMethodTypes["gists"]["delete"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteComment: { - (params?: RestEndpointMethodTypes["gists"]["deleteComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note**: This was previously `/gists/:gist_id/fork`. 
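For illustration, creating a gist with two files via the `gists.create` method described above; the description, visibility, and file contents are arbitrary placeholders.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function createExampleGist() {
  const { data: gist } = await octokit.rest.gists.create({
    description: "Example gist",
    public: false,
    files: {
      "hello.ts": { content: "console.log('hello');" },
      "notes.md": { content: "# Notes\n" },
    },
  });
  console.log(`created ${gist.html_url}`);
}
```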
- */ - fork: { - (params?: RestEndpointMethodTypes["gists"]["fork"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - get: { - (params?: RestEndpointMethodTypes["gists"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getComment: { - (params?: RestEndpointMethodTypes["gists"]["getComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getRevision: { - (params?: RestEndpointMethodTypes["gists"]["getRevision"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: - */ - list: { - (params?: RestEndpointMethodTypes["gists"]["list"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listComments: { - (params?: RestEndpointMethodTypes["gists"]["listComments"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listCommits: { - (params?: RestEndpointMethodTypes["gists"]["listCommits"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists public gists for the specified user: - */ - listForUser: { - (params?: RestEndpointMethodTypes["gists"]["listForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listForks: { - (params?: RestEndpointMethodTypes["gists"]["listForks"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List public gists sorted by most recently updated to least recently updated. - * - * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. - */ - listPublic: { - (params?: RestEndpointMethodTypes["gists"]["listPublic"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the authenticated user's starred gists: - */ - listStarred: { - (params?: RestEndpointMethodTypes["gists"]["listStarred"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - star: { - (params?: RestEndpointMethodTypes["gists"]["star"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - unstar: { - (params?: RestEndpointMethodTypes["gists"]["unstar"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. 
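And a sketch of the `gists.update` behaviour described just above: renaming one file while every other file in the gist keeps its previous content. The gist id and file names are placeholders.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function renameGistFile(gistId: string) {
  // Only the listed file changes; files that are not mentioned stay as they were.
  await octokit.rest.gists.update({
    gist_id: gistId,
    files: {
      "old-name.txt": { filename: "new-name.txt", content: "updated contents" },
    },
  });
}
```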
- */ - update: { - (params?: RestEndpointMethodTypes["gists"]["update"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateComment: { - (params?: RestEndpointMethodTypes["gists"]["updateComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - git: { - createBlob: { - (params?: RestEndpointMethodTypes["git"]["createBlob"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - createCommit: { - (params?: RestEndpointMethodTypes["git"]["createCommit"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. 
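A one-call sketch of `git.createRef` as described above, creating a branch that points at an existing commit. `baseSha` is assumed to be a known commit SHA, and the branch name is a placeholder.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function createBranch(owner: string, repo: string, baseSha: string) {
  // Branch refs must be fully qualified as refs/heads/<name>.
  await octokit.rest.git.createRef({
    owner,
    repo,
    ref: "refs/heads/feature/example",
    sha: baseSha,
  });
}
```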
- */ - createRef: { - (params?: RestEndpointMethodTypes["git"]["createRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - createTag: { - (params?: RestEndpointMethodTypes["git"]["createTag"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. - * - * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. 
For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." - */ - createTree: { - (params?: RestEndpointMethodTypes["git"]["createTree"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteRef: { - (params?: RestEndpointMethodTypes["git"]["deleteRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The `content` in the response will always be Base64 encoded. - * - * _Note_: This API supports blobs up to 100 megabytes in size. - */ - getBlob: { - (params?: RestEndpointMethodTypes["git"]["getBlob"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - getCommit: { - (params?: RestEndpointMethodTypes["git"]["getCommit"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. 
If the `:ref` doesn't match an existing ref, a `404` is returned. - * - * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - */ - getRef: { - (params?: RestEndpointMethodTypes["git"]["getRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - getTag: { - (params?: RestEndpointMethodTypes["git"]["getTag"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a single tree using the SHA1 value for that tree. - * - * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. 
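Putting the Git-database endpoints above together, a sketch of the "create a tree, commit it, then update a branch" flow that the `createTree` notes describe. The file path, contents, commit message, and branch name are placeholders, and `octokit` is again an assumed authenticated client.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function commitFile(owner: string, repo: string, branch: string) {
  // Current tip of the branch, used as the parent commit and base tree.
  const { data: ref } = await octokit.rest.git.getRef({ owner, repo, ref: `heads/${branch}` });
  const parentSha = ref.object.sha;
  const { data: parent } = await octokit.rest.git.getCommit({ owner, repo, commit_sha: parentSha });

  // New tree with one added/updated file, based on the parent commit's tree.
  const { data: tree } = await octokit.rest.git.createTree({
    owner,
    repo,
    base_tree: parent.tree.sha,
    tree: [{ path: "docs/example.md", mode: "100644", type: "blob", content: "hello\n" }],
  });

  // Commit the tree, then move the branch to the new commit.
  const { data: commit } = await octokit.rest.git.createCommit({
    owner,
    repo,
    message: "docs: add example",
    tree: tree.sha,
    parents: [parentSha],
  });
  await octokit.rest.git.updateRef({ owner, repo, ref: `heads/${branch}`, sha: commit.sha });
}
```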
- */ - getTree: { - (params?: RestEndpointMethodTypes["git"]["getTree"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. - * - * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. - * - * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - * - * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. - */ - listMatchingRefs: { - (params?: RestEndpointMethodTypes["git"]["listMatchingRefs"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateRef: { - (params?: RestEndpointMethodTypes["git"]["updateRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - gitignore: { - /** - * List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). - */ - getAllTemplates: { - (params?: RestEndpointMethodTypes["gitignore"]["getAllTemplates"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The API also allows fetching the source of a single template. - * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. - */ - getTemplate: { - (params?: RestEndpointMethodTypes["gitignore"]["getTemplate"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - interactions: { - /** - * Shows which type of GitHub user can interact with your public repositories and when the restriction expires. - */ - getRestrictionsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Shows which type of GitHub user can interact with this organization and when the restriction expires. If there is no restrictions, you will see an empty response. - */ - getRestrictionsForOrg: { - (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Shows which type of GitHub user can interact with this repository and when the restriction expires. 
If there are no restrictions, you will see an empty response. - */ - getRestrictionsForRepo: { - (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Shows which type of GitHub user can interact with your public repositories and when the restriction expires. - * @deprecated octokit.rest.interactions.getRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.getRestrictionsForAuthenticatedUser() (2021-02-02) - */ - getRestrictionsForYourPublicRepos: { - (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForYourPublicRepos"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes any interaction restrictions from your public repositories. - */ - removeRestrictionsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. - */ - removeRestrictionsForOrg: { - (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. - */ - removeRestrictionsForRepo: { - (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes any interaction restrictions from your public repositories. - * @deprecated octokit.rest.interactions.removeRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.removeRestrictionsForAuthenticatedUser() (2021-02-02) - */ - removeRestrictionsForYourPublicRepos: { - (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForYourPublicRepos"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. - */ - setRestrictionsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. 
Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. - */ - setRestrictionsForOrg: { - (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. - */ - setRestrictionsForRepo: { - (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. - * @deprecated octokit.rest.interactions.setRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.setRestrictionsForAuthenticatedUser() (2021-02-02) - */ - setRestrictionsForYourPublicRepos: { - (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForYourPublicRepos"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - issues: { - /** - * Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. - */ - addAssignees: { - (params?: RestEndpointMethodTypes["issues"]["addAssignees"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - addLabels: { - (params?: RestEndpointMethodTypes["issues"]["addLabels"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Checks if a user has permission to be assigned to an issue in this repository. - * - * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. - * - * Otherwise a `404` status code is returned. - */ - checkUserCanBeAssigned: { - (params?: RestEndpointMethodTypes["issues"]["checkUserCanBeAssigned"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://help.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. 
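An illustrative call to the `issues.create` endpoint described above; the title, body, labels, and assignee are placeholders, and the assignee must be valid for the repository. As noted, the call fails with `410 Gone` when issues are disabled.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function openIssue(owner: string, repo: string) {
  const { data: issue } = await octokit.rest.issues.create({
    owner,
    repo,
    title: "Example issue",
    body: "Opened from a script for illustration.",
    labels: ["bug"],
    assignees: ["octocat"],
  });
  console.log(`opened #${issue.number}`);
}
```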
- */ - create: { - (params?: RestEndpointMethodTypes["issues"]["create"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. - */ - createComment: { - (params?: RestEndpointMethodTypes["issues"]["createComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - createLabel: { - (params?: RestEndpointMethodTypes["issues"]["createLabel"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - createMilestone: { - (params?: RestEndpointMethodTypes["issues"]["createMilestone"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteComment: { - (params?: RestEndpointMethodTypes["issues"]["deleteComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteLabel: { - (params?: RestEndpointMethodTypes["issues"]["deleteLabel"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteMilestone: { - (params?: RestEndpointMethodTypes["issues"]["deleteMilestone"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was - * [transferred](https://help.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If - * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API - * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read - * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe - * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
- */ - get: { - (params?: RestEndpointMethodTypes["issues"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getComment: { - (params?: RestEndpointMethodTypes["issues"]["getComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getEvent: { - (params?: RestEndpointMethodTypes["issues"]["getEvent"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getLabel: { - (params?: RestEndpointMethodTypes["issues"]["getLabel"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getMilestone: { - (params?: RestEndpointMethodTypes["issues"]["getMilestone"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List issues assigned to the authenticated user across all visible repositories including owned repositories, member - * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not - * necessarily assigned to you. - * - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - list: { - (params?: RestEndpointMethodTypes["issues"]["list"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the [available assignees](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. - */ - listAssignees: { - (params?: RestEndpointMethodTypes["issues"]["listAssignees"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Issue Comments are ordered by ascending ID. - */ - listComments: { - (params?: RestEndpointMethodTypes["issues"]["listComments"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * By default, Issue Comments are ordered by ascending ID. 
- */ - listCommentsForRepo: { - (params?: RestEndpointMethodTypes["issues"]["listCommentsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listEvents: { - (params?: RestEndpointMethodTypes["issues"]["listEvents"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listEventsForRepo: { - (params?: RestEndpointMethodTypes["issues"]["listEventsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listEventsForTimeline: { - (params?: RestEndpointMethodTypes["issues"]["listEventsForTimeline"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List issues across owned and member repositories assigned to the authenticated user. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - listForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["issues"]["listForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List issues in an organization assigned to the authenticated user. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - listForOrg: { - (params?: RestEndpointMethodTypes["issues"]["listForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List issues in a repository. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
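Since, as noted above, issue listings also contain pull requests, a small sketch of listing a repository's open issues with `listForRepo` and filtering the pull requests out by the `pull_request` key. The page size is an arbitrary choice.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN!);

export async function listOpenIssuesOnly(owner: string, repo: string) {
  const { data: items } = await octokit.rest.issues.listForRepo({
    owner,
    repo,
    state: "open",
    per_page: 100,
  });
  // Entries carrying a pull_request key are pull requests, not plain issues.
  return items.filter((item) => !item.pull_request);
}
```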
- */ - listForRepo: { - (params?: RestEndpointMethodTypes["issues"]["listForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listLabelsForMilestone: { - (params?: RestEndpointMethodTypes["issues"]["listLabelsForMilestone"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listLabelsForRepo: { - (params?: RestEndpointMethodTypes["issues"]["listLabelsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listLabelsOnIssue: { - (params?: RestEndpointMethodTypes["issues"]["listLabelsOnIssue"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listMilestones: { - (params?: RestEndpointMethodTypes["issues"]["listMilestones"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with push access can lock an issue or pull request's conversation. - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - lock: { - (params?: RestEndpointMethodTypes["issues"]["lock"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - removeAllLabels: { - (params?: RestEndpointMethodTypes["issues"]["removeAllLabels"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes one or more assignees from an issue. - */ - removeAssignees: { - (params?: RestEndpointMethodTypes["issues"]["removeAssignees"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. - */ - removeLabel: { - (params?: RestEndpointMethodTypes["issues"]["removeLabel"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes any previous labels and sets the new labels for an issue. - */ - setLabels: { - (params?: RestEndpointMethodTypes["issues"]["setLabels"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with push access can unlock an issue's conversation. - */ - unlock: { - (params?: RestEndpointMethodTypes["issues"]["unlock"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Issue owners and users with push access can edit an issue. 
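The `lock` description notes that an empty request body needs an explicit zero `Content-Length`; that caveat mainly matters for hand-rolled HTTP calls, since the generated client handles request framing. A sketch with placeholder values:

```typescript
import { getOctokit } from "@actions/github";

async function lockConversation(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  await octokit.rest.issues.lock({
    owner: "octo-org",       // placeholder
    repo: "octo-repo",       // placeholder
    issue_number: 42,        // placeholder
    lock_reason: "resolved", // "off-topic" | "too heated" | "resolved" | "spam"
  });
}
```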
- */ - update: { - (params?: RestEndpointMethodTypes["issues"]["update"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateComment: { - (params?: RestEndpointMethodTypes["issues"]["updateComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateLabel: { - (params?: RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateMilestone: { - (params?: RestEndpointMethodTypes["issues"]["updateMilestone"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - licenses: { - get: { - (params?: RestEndpointMethodTypes["licenses"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getAllCommonlyUsed: { - (params?: RestEndpointMethodTypes["licenses"]["getAllCommonlyUsed"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This method returns the contents of the repository's license file, if one is detected. - * - * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. - */ - getForRepo: { - (params?: RestEndpointMethodTypes["licenses"]["getForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - markdown: { - render: { - (params?: RestEndpointMethodTypes["markdown"]["render"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. - */ - renderRaw: { - (params?: RestEndpointMethodTypes["markdown"]["renderRaw"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - meta: { - /** - * Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://help.github.com/articles/about-github-s-ip-addresses/)." - * - * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. 
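`markdown.renderRaw` expects plain text rather than JSON, as described above. A hedged sketch (placeholder content; the explicit `content-type` header is passed defensively in case the caller's setup does not already apply it):

```typescript
import { getOctokit } from "@actions/github";

async function renderRaw(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const { data: html } = await octokit.rest.markdown.renderRaw({
    data: "# Release notes\n\nRendered in *plain* mode, without GFM extensions.",
    headers: { "content-type": "text/plain" }, // sent as text, not JSON
  });
  console.log(html);
}
```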
- */ - get: { - (params?: RestEndpointMethodTypes["meta"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get the octocat as ASCII art - */ - getOctocat: { - (params?: RestEndpointMethodTypes["meta"]["getOctocat"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get a random sentence from the Zen of GitHub - */ - getZen: { - (params?: RestEndpointMethodTypes["meta"]["getZen"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get Hypermedia links to resources accessible in GitHub's REST API - */ - root: { - (params?: RestEndpointMethodTypes["meta"]["root"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - migrations: { - /** - * Stop an import for a repository. - */ - cancelImport: { - (params?: RestEndpointMethodTypes["migrations"]["cancelImport"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. - */ - deleteArchiveForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["migrations"]["deleteArchiveForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a previous migration archive. Migration archives are automatically deleted after seven days. - */ - deleteArchiveForOrg: { - (params?: RestEndpointMethodTypes["migrations"]["deleteArchiveForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Fetches the URL to a migration archive. - */ - downloadArchiveForOrg: { - (params?: RestEndpointMethodTypes["migrations"]["downloadArchiveForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: - * - * * attachments - * * bases - * * commit\_comments - * * issue\_comments - * * issue\_events - * * issues - * * milestones - * * organizations - * * projects - * * protected\_branches - * * pull\_request\_reviews - * * pull\_requests - * * releases - * * repositories - * * review\_comments - * * schema - * * users - * - * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. 
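Since the `meta.get` docs insist on querying the live endpoint for IP ranges rather than copying them from documentation, a small sketch (field names such as `hooks` and `git` come from the public meta response and are assumed here):

```typescript
import { getOctokit } from "@actions/github";

async function printGitHubCidrs(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const { data: meta } = await octokit.rest.meta.get();
  // CIDR lists change over time; never hard-code them.
  console.log("webhook sources:", meta.hooks);
  console.log("git endpoints:", meta.git);
}
```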
- */ - getArchiveForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["migrations"]["getArchiveForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. - * - * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. - */ - getCommitAuthors: { - (params?: RestEndpointMethodTypes["migrations"]["getCommitAuthors"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * View the progress of an import. - * - * **Import status** - * - * This section includes details about the possible values of the `status` field of the Import Progress response. - * - * An import that does not have errors will progress through these steps: - * - * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. - * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). - * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. - * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". - * * `complete` - the import is complete, and the repository is ready on GitHub. - * - * If there are problems, you will see one of these in the `status` field: - * - * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com) for more information. - * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. 
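Pairing `getCommitAuthors` with `mapCommitAuthor`, as the comment above suggests, might look like this sketch (owner, repo, and the corrected identity are placeholders):

```typescript
import { getOctokit } from "@actions/github";

async function fixImportedAuthors(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const { data: authors } = await octokit.rest.migrations.getCommitAuthors({
    owner: "octo-org",
    repo: "imported-repo",
  });
  for (const author of authors) {
    if (author.remote_name === "hubot") {
      // Replace the placeholder identity generated by the importer.
      await octokit.rest.migrations.mapCommitAuthor({
        owner: "octo-org",
        repo: "imported-repo",
        author_id: author.id,
        name: "Hubot",
        email: "hubot@example.com",
      });
    }
  }
}
```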
To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. - * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * - * **The project_choices field** - * - * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. - * - * **Git LFS related fields** - * - * This section includes details about Git LFS related fields that may be present in the Import Progress response. - * - * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. - * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. - * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. - * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. - */ - getImportStatus: { - (params?: RestEndpointMethodTypes["migrations"]["getImportStatus"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List files larger than 100MB found during the import - */ - getLargeFiles: { - (params?: RestEndpointMethodTypes["migrations"]["getLargeFiles"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: - * - * * `pending` - the migration hasn't started yet. - * * `exporting` - the migration is in progress. - * * `exported` - the migration finished successfully. - * * `failed` - the migration failed. - * - * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). - */ - getStatusForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["migrations"]["getStatusForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Fetches the status of a migration. - * - * The `state` of a migration can be one of the following values: - * - * * `pending`, which means the migration hasn't started yet. - * * `exporting`, which means the migration is in progress. - * * `exported`, which means the migration finished successfully. - * * `failed`, which means the migration failed. 
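A polling loop over the import `status` values listed above might look like this sketch (placeholder repository; the 30-second interval is arbitrary):

```typescript
import { getOctokit } from "@actions/github";

async function waitForImport(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  for (;;) {
    const { data } = await octokit.rest.migrations.getImportStatus({
      owner: "octo-org",
      repo: "imported-repo",
    });
    if (data.status === "complete") return;
    if (data.status === "error" || data.status === "auth_failed") {
      throw new Error(`import failed at ${data.failed_step ?? "unknown step"}`);
    }
    // Still detecting / importing / mapping / pushing.
    await new Promise((resolve) => setTimeout(resolve, 30_000));
  }
}
```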
- */ - getStatusForOrg: { - (params?: RestEndpointMethodTypes["migrations"]["getStatusForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all migrations a user has started. - */ - listForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["migrations"]["listForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the most recent migrations. - */ - listForOrg: { - (params?: RestEndpointMethodTypes["migrations"]["listForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all the repositories for this organization migration. - */ - listReposForOrg: { - (params?: RestEndpointMethodTypes["migrations"]["listReposForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all the repositories for this user migration. - */ - listReposForUser: { - (params?: RestEndpointMethodTypes["migrations"]["listReposForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. - */ - mapCommitAuthor: { - (params?: RestEndpointMethodTypes["migrations"]["mapCommitAuthor"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://help.github.com/articles/versioning-large-files/). - */ - setLfsPreference: { - (params?: RestEndpointMethodTypes["migrations"]["setLfsPreference"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Initiates the generation of a user migration archive. - */ - startForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["migrations"]["startForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Initiates the generation of a migration archive. - */ - startForOrg: { - (params?: RestEndpointMethodTypes["migrations"]["startForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Start a source import to a GitHub repository using GitHub Importer. - */ - startImport: { - (params?: RestEndpointMethodTypes["migrations"]["startImport"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. 
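Starting a user migration and waiting for the archive, per `startForAuthenticatedUser` and the status endpoints above, could be sketched as follows (placeholder repository list and an arbitrary polling interval):

```typescript
import { getOctokit } from "@actions/github";

async function exportRepos(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const { data: migration } = await octokit.rest.migrations.startForAuthenticatedUser({
    repositories: ["octo-org/octo-repo"], // placeholder
    lock_repositories: false,
  });
  let state = migration.state;
  while (state !== "exported" && state !== "failed") {
    await new Promise((resolve) => setTimeout(resolve, 15_000));
    const { data } = await octokit.rest.migrations.getStatusForAuthenticatedUser({
      migration_id: migration.id,
    });
    state = data.state;
  }
  console.log(`migration ${migration.id} finished with state ${state}`);
}
```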
Returns a status of `404 Not Found` if the repository is not locked. - */ - unlockRepoForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["migrations"]["unlockRepoForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. - */ - unlockRepoForOrg: { - (params?: RestEndpointMethodTypes["migrations"]["unlockRepoForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API - * request. If no parameters are provided, the import will be restarted. - */ - updateImport: { - (params?: RestEndpointMethodTypes["migrations"]["updateImport"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - orgs: { - blockUser: { - (params?: RestEndpointMethodTypes["orgs"]["blockUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). - */ - cancelInvitation: { - (params?: RestEndpointMethodTypes["orgs"]["cancelInvitation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - checkBlockedUser: { - (params?: RestEndpointMethodTypes["orgs"]["checkBlockedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Check if a user is, publicly or privately, a member of the organization. - */ - checkMembershipForUser: { - (params?: RestEndpointMethodTypes["orgs"]["checkMembershipForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - checkPublicMembershipForUser: { - (params?: RestEndpointMethodTypes["orgs"]["checkPublicMembershipForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://help.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". - */ - convertMemberToOutsideCollaborator: { - (params?: RestEndpointMethodTypes["orgs"]["convertMemberToOutsideCollaborator"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Invite people to an organization by using their GitHub user ID or their email address. 
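For `createInvitation`, described above, a sketch inviting by email (organization name, address, and role are placeholders; pass `invitee_id` instead of `email` to invite an existing account by ID):

```typescript
import { getOctokit } from "@actions/github";

async function inviteByEmail(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  await octokit.rest.orgs.createInvitation({
    org: "octo-org",
    email: "new.member@example.com",
    role: "direct_member", // "admin" | "direct_member" | "billing_manager"
  });
}
```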
In order to create invitations in an organization, the authenticated user must be an organization owner. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - createInvitation: { - (params?: RestEndpointMethodTypes["orgs"]["createInvitation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Here's how you can create a hook that posts payloads in JSON format: - */ - createWebhook: { - (params?: RestEndpointMethodTypes["orgs"]["createWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteWebhook: { - (params?: RestEndpointMethodTypes["orgs"]["deleteWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). - * - * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." - */ - get: { - (params?: RestEndpointMethodTypes["orgs"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getMembershipForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["orgs"]["getMembershipForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. - */ - getMembershipForUser: { - (params?: RestEndpointMethodTypes["orgs"]["getMembershipForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." - */ - getWebhook: { - (params?: RestEndpointMethodTypes["orgs"]["getWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the webhook configuration for an organization. 
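The `createWebhook` comment promises an example of a hook that posts JSON payloads; a sketch of that call (URL, secret source, and event list are placeholders):

```typescript
import { getOctokit } from "@actions/github";

async function createOrgHook(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  await octokit.rest.orgs.createWebhook({
    org: "octo-org",
    name: "web", // organization hooks use the "web" service name
    config: {
      url: "https://example.com/github-events",
      content_type: "json",
      secret: process.env.WEBHOOK_SECRET,
    },
    events: ["push", "pull_request"],
    active: true,
  });
}
```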
To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." - * - * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. - */ - getWebhookConfigForOrg: { - (params?: RestEndpointMethodTypes["orgs"]["getWebhookConfigForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all organizations, in the order that they were created on GitHub. - * - * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. - */ - list: { - (params?: RestEndpointMethodTypes["orgs"]["list"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. - */ - listAppInstallations: { - (params?: RestEndpointMethodTypes["orgs"]["listAppInstallations"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the users blocked by an organization. - */ - listBlockedUsers: { - (params?: RestEndpointMethodTypes["orgs"]["listBlockedUsers"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. - */ - listFailedInvitations: { - (params?: RestEndpointMethodTypes["orgs"]["listFailedInvitations"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List organizations for the authenticated user. - * - * **OAuth scope requirements** - * - * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. - */ - listForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["orgs"]["listForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List [public organization memberships](https://help.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. - * - * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. 
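Because the org-listing endpoints above paginate via `since` and the Link header, it is easiest to let the bundled pagination plugin follow the links. A sketch for the authenticated user's organizations (the `paginate` helper is assumed to be present, as it is on the client returned by `getOctokit`):

```typescript
import { getOctokit } from "@actions/github";

async function listAllMyOrgs(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  // paginate() follows the Link header until the last page.
  const orgs = await octokit.paginate(octokit.rest.orgs.listForAuthenticatedUser, {
    per_page: 100,
  });
  console.log(orgs.map((org) => org.login));
}
```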
- */ - listForUser: { - (params?: RestEndpointMethodTypes["orgs"]["listForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. - */ - listInvitationTeams: { - (params?: RestEndpointMethodTypes["orgs"]["listInvitationTeams"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. - */ - listMembers: { - (params?: RestEndpointMethodTypes["orgs"]["listMembers"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listMembershipsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["orgs"]["listMembershipsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all users who are outside collaborators of an organization. - */ - listOutsideCollaborators: { - (params?: RestEndpointMethodTypes["orgs"]["listOutsideCollaborators"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. - */ - listPendingInvitations: { - (params?: RestEndpointMethodTypes["orgs"]["listPendingInvitations"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Members of an organization can choose to have their membership publicized or not. - */ - listPublicMembers: { - (params?: RestEndpointMethodTypes["orgs"]["listPublicMembers"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listWebhooks: { - (params?: RestEndpointMethodTypes["orgs"]["listWebhooks"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. - */ - pingWebhook: { - (params?: RestEndpointMethodTypes["orgs"]["pingWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. - */ - removeMember: { - (params?: RestEndpointMethodTypes["orgs"]["removeMember"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. - * - * If the specified user is an active member of the organization, this will remove them from the organization. 
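Listing organization members with a role filter, per the `listMembers` note above, might be sketched as follows (placeholder organization; `role` narrows the listing to owners):

```typescript
import { getOctokit } from "@actions/github";

async function listOrgOwners(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const owners = await octokit.paginate(octokit.rest.orgs.listMembers, {
    org: "octo-org",
    role: "admin", // "all" | "admin" | "member"
  });
  console.log(owners.map((member) => member.login));
}
```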
If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. - */ - removeMembershipForUser: { - (params?: RestEndpointMethodTypes["orgs"]["removeMembershipForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removing a user from this list will remove them from all the organization's repositories. - */ - removeOutsideCollaborator: { - (params?: RestEndpointMethodTypes["orgs"]["removeOutsideCollaborator"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - removePublicMembershipForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["orgs"]["removePublicMembershipForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Only authenticated organization owners can add a member to the organization or update the member's role. - * - * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. - * - * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. - * - * **Rate limits** - * - * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. - */ - setMembershipForUser: { - (params?: RestEndpointMethodTypes["orgs"]["setMembershipForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The user can publicize their own membership. (A user cannot publicize the membership for another user.) - * - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - setPublicMembershipForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["orgs"]["setPublicMembershipForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - unblockUser: { - (params?: RestEndpointMethodTypes["orgs"]["unblockUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. 
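A sketch of `setMembershipForUser` under the rules above (placeholder organization and username):

```typescript
import { getOctokit } from "@actions/github";

async function addOrUpdateMember(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const { data: membership } = await octokit.rest.orgs.setMembershipForUser({
    org: "octo-org",
    username: "octocat",
    role: "member", // "admin" makes the user an organization owner
  });
  console.log(membership.state); // "pending" for a fresh invitation, "active" otherwise
}
```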
For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). - * - * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. - */ - update: { - (params?: RestEndpointMethodTypes["orgs"]["update"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateMembershipForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["orgs"]["updateMembershipForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." - */ - updateWebhook: { - (params?: RestEndpointMethodTypes["orgs"]["updateWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." - * - * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. - */ - updateWebhookConfigForOrg: { - (params?: RestEndpointMethodTypes["orgs"]["updateWebhookConfigForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - packages: { - /** - * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - deletePackageForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["deletePackageForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. 
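Following the deprecation notice above, a sketch of `orgs.update` using the granular repository-creation flags rather than `members_allowed_repository_creation_type` (placeholder organization and example values):

```typescript
import { getOctokit } from "@actions/github";

async function tightenRepoCreation(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  await octokit.rest.orgs.update({
    org: "octo-org",
    members_can_create_public_repositories: false,
    members_can_create_private_repositories: true,
  });
}
```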
- */ - deletePackageForOrg: { - (params?: RestEndpointMethodTypes["packages"]["deletePackageForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - deletePackageVersionForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. - */ - deletePackageVersionForOrg: { - (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns all package versions for a package owned by an organization. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - * @deprecated octokit.rest.packages.getAllPackageVersionsForAPackageOwnedByAnOrg() has been renamed to octokit.rest.packages.getAllPackageVersionsForPackageOwnedByOrg() (2021-03-24) - */ - getAllPackageVersionsForAPackageOwnedByAnOrg: { - (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForAPackageOwnedByAnOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns all package versions for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - * @deprecated octokit.rest.packages.getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser() has been renamed to octokit.rest.packages.getAllPackageVersionsForPackageOwnedByAuthenticatedUser() (2021-03-24) - */ - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns all package versions for a package owned by the authenticated user. 
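Deleting a single package version in an organization, as documented above, might be sketched like this (all identifiers are placeholders; the download-count and permission caveats still apply):

```typescript
import { getOctokit } from "@actions/github";

async function deleteContainerVersion(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  await octokit.rest.packages.deletePackageVersionForOrg({
    package_type: "container",
    package_name: "my-image",   // placeholder
    org: "octo-org",            // placeholder
    package_version_id: 123456, // placeholder version id
  });
}
```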
- * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns all package versions for a package owned by an organization. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getAllPackageVersionsForPackageOwnedByOrg: { - (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns all package versions for a public package owned by a specified user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getAllPackageVersionsForPackageOwnedByUser: { - (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific package for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getPackageForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["getPackageForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific package in an organization. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getPackageForOrganization: { - (params?: RestEndpointMethodTypes["packages"]["getPackageForOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific package metadata for a public package owned by a user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getPackageForUser: { - (params?: RestEndpointMethodTypes["packages"]["getPackageForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific package version for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. 
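Enumerating versions of an organization-owned package, per the endpoints above, could look like this sketch (placeholder package and organization):

```typescript
import { getOctokit } from "@actions/github";

async function listPackageVersions(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const versions = await octokit.paginate(
    octokit.rest.packages.getAllPackageVersionsForPackageOwnedByOrg,
    { package_type: "npm", package_name: "my-package", org: "octo-org" },
  );
  console.log(versions.map((version) => version.name));
}
```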
- */ - getPackageVersionForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific package version in an organization. - * - * You must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getPackageVersionForOrganization: { - (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForOrganization"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a specific package version for a public package owned by a specified user. - * - * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - getPackageVersionForUser: { - (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Restores a package owned by the authenticated user. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scope. If `package_type` is not `container`, your token must also include the `repo` scope. - */ - restorePackageForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["restorePackageForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Restores an entire package in an organization. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. - */ - restorePackageForOrg: { - (params?: RestEndpointMethodTypes["packages"]["restorePackageForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Restores a package version owned by the authenticated user. 
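Restoring a recently deleted package under the conditions listed above, sketched with a placeholder package name:

```typescript
import { getOctokit } from "@actions/github";

async function restorePackage(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  // Only works within 30 days of deletion and while the namespace is unused.
  await octokit.rest.packages.restorePackageForAuthenticatedUser({
    package_type: "npm",
    package_name: "my-package", // placeholder
  });
}
```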
- * - * You can restore a deleted package version under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scope. If `package_type` is not `container`, your token must also include the `repo` scope. - */ - restorePackageVersionForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Restores a specific package version in an organization. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. - */ - restorePackageVersionForOrg: { - (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - projects: { - /** - * Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. - */ - addCollaborator: { - (params?: RestEndpointMethodTypes["projects"]["addCollaborator"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by the `pull_request` key. - * - * Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
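Since `projects.createCard` can attach an existing issue (or, because pull requests are issues too, a pull request) by content ID, a sketch follows; the column and issue numbers are placeholders:

```typescript
import { getOctokit } from "@actions/github";

async function addIssueToBoard(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const { data: issue } = await octokit.rest.issues.get({
    owner: "octo-org",
    repo: "octo-repo",
    issue_number: 42, // placeholder
  });
  await octokit.rest.projects.createCard({
    column_id: 367,        // placeholder project column
    content_id: issue.id,  // note: the issue id, not the issue number
    content_type: "Issue",
  });
}
```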
- */ - createCard: { - (params?: RestEndpointMethodTypes["projects"]["createCard"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - createColumn: { - (params?: RestEndpointMethodTypes["projects"]["createColumn"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - createForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["projects"]["createForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates an organization project board. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. - */ - createForOrg: { - (params?: RestEndpointMethodTypes["projects"]["createForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a repository project board. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. - */ - createForRepo: { - (params?: RestEndpointMethodTypes["projects"]["createForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a project board. Returns a `404 Not Found` status if projects are disabled. - */ - delete: { - (params?: RestEndpointMethodTypes["projects"]["delete"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteCard: { - (params?: RestEndpointMethodTypes["projects"]["deleteCard"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteColumn: { - (params?: RestEndpointMethodTypes["projects"]["deleteColumn"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. - */ - get: { - (params?: RestEndpointMethodTypes["projects"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getCard: { - (params?: RestEndpointMethodTypes["projects"]["getCard"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getColumn: { - (params?: RestEndpointMethodTypes["projects"]["getColumn"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. 
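Creating an organization project and a first column, per `createForOrg` and `createColumn` above (placeholder names):

```typescript
import { getOctokit } from "@actions/github";

async function bootstrapBoard(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  const { data: project } = await octokit.rest.projects.createForOrg({
    org: "octo-org",
    name: "Release tracking",
    body: "Work queued for the next release", // optional description
  });
  await octokit.rest.projects.createColumn({
    project_id: project.id,
    name: "To do",
  });
}
```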
- */ - getPermissionForUser: { - (params?: RestEndpointMethodTypes["projects"]["getPermissionForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listCards: { - (params?: RestEndpointMethodTypes["projects"]["listCards"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. - */ - listCollaborators: { - (params?: RestEndpointMethodTypes["projects"]["listCollaborators"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listColumns: { - (params?: RestEndpointMethodTypes["projects"]["listColumns"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. - */ - listForOrg: { - (params?: RestEndpointMethodTypes["projects"]["listForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. - */ - listForRepo: { - (params?: RestEndpointMethodTypes["projects"]["listForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listForUser: { - (params?: RestEndpointMethodTypes["projects"]["listForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - moveCard: { - (params?: RestEndpointMethodTypes["projects"]["moveCard"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - moveColumn: { - (params?: RestEndpointMethodTypes["projects"]["moveColumn"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. - */ - removeCollaborator: { - (params?: RestEndpointMethodTypes["projects"]["removeCollaborator"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
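Moving a card with `moveCard`, shown above, in a short sketch (IDs are placeholders; `position` also accepts `"bottom"` or `"after:<card_id>"`):

```typescript
import { getOctokit } from "@actions/github";

async function moveCardToTop(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN!);
  await octokit.rest.projects.moveCard({
    card_id: 1478,  // placeholder
    position: "top",
    column_id: 367, // placeholder; omit to move within the current column
  });
}
```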
- */ - update: { - (params?: RestEndpointMethodTypes["projects"]["update"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateCard: { - (params?: RestEndpointMethodTypes["projects"]["updateCard"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateColumn: { - (params?: RestEndpointMethodTypes["projects"]["updateColumn"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - pulls: { - checkIfMerged: { - (params?: RestEndpointMethodTypes["pulls"]["checkIfMerged"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. - * - * You can create a new pull request. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - create: { - (params?: RestEndpointMethodTypes["pulls"]["create"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - createReplyForReviewComment: { - (params?: RestEndpointMethodTypes["pulls"]["createReplyForReviewComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. 
See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * Pull request reviews created in the `PENDING` state do not include the `submitted_at` property in the response. - * - * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. - * - * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. - */ - createReview: { - (params?: RestEndpointMethodTypes["pulls"]["createReview"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. - * - * You can still create a review comment using the `position` parameter. When you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. For more information, see the [`comfort-fade` preview notice](https://docs.github.com/rest/reference/pulls#create-a-review-comment-for-a-pull-request-preview-notices). - * - * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - createReviewComment: { - (params?: RestEndpointMethodTypes["pulls"]["createReviewComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deletePendingReview: { - (params?: RestEndpointMethodTypes["pulls"]["deletePendingReview"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a review comment. 
- */ - deleteReviewComment: { - (params?: RestEndpointMethodTypes["pulls"]["deleteReviewComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. - */ - dismissReview: { - (params?: RestEndpointMethodTypes["pulls"]["dismissReview"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists details of a pull request by providing its number. - * - * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - * - * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. - * - * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: - * - * * If merged as a [merge commit](https://help.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. - * * If merged via a [squash](https://help.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. - * * If [rebased](https://help.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. - * - * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. 
- */ - get: { - (params?: RestEndpointMethodTypes["pulls"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getReview: { - (params?: RestEndpointMethodTypes["pulls"]["getReview"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Provides details for a review comment. - */ - getReviewComment: { - (params?: RestEndpointMethodTypes["pulls"]["getReviewComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - list: { - (params?: RestEndpointMethodTypes["pulls"]["list"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List comments for a specific pull request review. - */ - listCommentsForReview: { - (params?: RestEndpointMethodTypes["pulls"]["listCommentsForReview"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. - */ - listCommits: { - (params?: RestEndpointMethodTypes["pulls"]["listCommits"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. - */ - listFiles: { - (params?: RestEndpointMethodTypes["pulls"]["listFiles"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listRequestedReviewers: { - (params?: RestEndpointMethodTypes["pulls"]["listRequestedReviewers"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all review comments for a pull request. By default, review comments are in ascending order by ID. - */ - listReviewComments: { - (params?: RestEndpointMethodTypes["pulls"]["listReviewComments"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. - */ - listReviewCommentsForRepo: { - (params?: RestEndpointMethodTypes["pulls"]["listReviewCommentsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The list of reviews returns in chronological order. 
- */ - listReviews: { - (params?: RestEndpointMethodTypes["pulls"]["listReviews"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. - */ - merge: { - (params?: RestEndpointMethodTypes["pulls"]["merge"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - removeRequestedReviewers: { - (params?: RestEndpointMethodTypes["pulls"]["removeRequestedReviewers"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. - */ - requestReviewers: { - (params?: RestEndpointMethodTypes["pulls"]["requestReviewers"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - submitReview: { - (params?: RestEndpointMethodTypes["pulls"]["submitReview"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. - */ - update: { - (params?: RestEndpointMethodTypes["pulls"]["update"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. - */ - updateBranch: { - (params?: RestEndpointMethodTypes["pulls"]["updateBranch"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Update the review summary comment with new text. 
- */ - updateReview: { - (params?: RestEndpointMethodTypes["pulls"]["updateReview"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables you to edit a review comment. - */ - updateReviewComment: { - (params?: RestEndpointMethodTypes["pulls"]["updateReviewComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - rateLimit: { - /** - * **Note:** Accessing this endpoint does not count against your REST API rate limit. - * - * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. - */ - get: { - (params?: RestEndpointMethodTypes["rateLimit"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - reactions: { - /** - * Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this commit comment. - */ - createForCommitComment: { - (params?: RestEndpointMethodTypes["reactions"]["createForCommitComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with a `Status: 200 OK` means that you already added the reaction type to this issue. - */ - createForIssue: { - (params?: RestEndpointMethodTypes["reactions"]["createForIssue"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this issue comment. - */ - createForIssueComment: { - (params?: RestEndpointMethodTypes["reactions"]["createForIssueComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this pull request review comment. - */ - createForPullRequestReviewComment: { - (params?: RestEndpointMethodTypes["reactions"]["createForPullRequestReviewComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. 
- */ - createForTeamDiscussionCommentInOrg: { - (params?: RestEndpointMethodTypes["reactions"]["createForTeamDiscussionCommentInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. - */ - createForTeamDiscussionInOrg: { - (params?: RestEndpointMethodTypes["reactions"]["createForTeamDiscussionInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. - * - * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). - */ - deleteForCommitComment: { - (params?: RestEndpointMethodTypes["reactions"]["deleteForCommitComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. - * - * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). - */ - deleteForIssue: { - (params?: RestEndpointMethodTypes["reactions"]["deleteForIssue"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. - * - * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). - */ - deleteForIssueComment: { - (params?: RestEndpointMethodTypes["reactions"]["deleteForIssueComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` - * - * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). - */ - deleteForPullRequestComment: { - (params?: RestEndpointMethodTypes["reactions"]["deleteForPullRequestComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. - * - * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - deleteForTeamDiscussion: { - (params?: RestEndpointMethodTypes["reactions"]["deleteForTeamDiscussion"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. - * - * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - deleteForTeamDiscussionComment: { - (params?: RestEndpointMethodTypes["reactions"]["deleteForTeamDiscussionComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/). - * - * OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). - * @deprecated octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy - */ - deleteLegacy: { - (params?: RestEndpointMethodTypes["reactions"]["deleteLegacy"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). - */ - listForCommitComment: { - (params?: RestEndpointMethodTypes["reactions"]["listForCommitComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the reactions to an [issue](https://docs.github.com/rest/reference/issues). - */ - listForIssue: { - (params?: RestEndpointMethodTypes["reactions"]["listForIssue"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). - */ - listForIssueComment: { - (params?: RestEndpointMethodTypes["reactions"]["listForIssueComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). 
- */ - listForPullRequestReviewComment: { - (params?: RestEndpointMethodTypes["reactions"]["listForPullRequestReviewComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. - */ - listForTeamDiscussionCommentInOrg: { - (params?: RestEndpointMethodTypes["reactions"]["listForTeamDiscussionCommentInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. - */ - listForTeamDiscussionInOrg: { - (params?: RestEndpointMethodTypes["reactions"]["listForTeamDiscussionInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - repos: { - acceptInvitation: { - (params?: RestEndpointMethodTypes["repos"]["acceptInvitation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - addAppAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["addAppAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. 
See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * For more information the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). - * - * **Rate limits** - * - * To prevent abuse, you are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. - */ - addCollaborator: { - (params?: RestEndpointMethodTypes["repos"]["addCollaborator"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - addStatusCheckContexts: { - (params?: RestEndpointMethodTypes["repos"]["addStatusCheckContexts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified teams push access for this branch. You can also give push access to child teams. - * - * | Type | Description | - * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | - * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - addTeamAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["addTeamAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified people push access for this branch. - * - * | Type | Description | - * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - addUserAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["addUserAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. - * - * Team members will include the members of child teams. - */ - checkCollaborator: { - (params?: RestEndpointMethodTypes["repos"]["checkCollaborator"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". - */ - checkVulnerabilityAlerts: { - (params?: RestEndpointMethodTypes["repos"]["checkVulnerabilityAlerts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. - * - * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. - * - * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. - * - * **Working with large comparisons** - * - * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." - * - * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. 
When a paging parameter is specified, the first commit in the returned list of each page is the earliest. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - compareCommits: { - (params?: RestEndpointMethodTypes["repos"]["compareCommits"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create a comment for a commit using its `:commit_sha`. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - createCommitComment: { - (params?: RestEndpointMethodTypes["repos"]["createCommitComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. - */ - createCommitSignatureProtection: { - (params?: RestEndpointMethodTypes["repos"]["createCommitSignatureProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with push access in a repository can create commit statuses for a given SHA. - * - * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. - */ - createCommitStatus: { - (params?: RestEndpointMethodTypes["repos"]["createCommitStatus"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You can create a read-only deploy key. - */ - createDeployKey: { - (params?: RestEndpointMethodTypes["repos"]["createDeployKey"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deployments offer a few configurable parameters with certain defaults. - * - * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them - * before we merge a pull request. - * - * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have - * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter - * makes it easier to track which environments have requested deployments. The default environment is `production`. - * - * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If - * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, - * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will - * return a failure response. - * - * By default, [commit statuses](https://docs.github.com/rest/reference/repos#statuses) for every submitted context must be in a `success` - * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to - * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do - * not require any contexts or create any commit statuses, the deployment will always succeed. - * - * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text - * field that will be passed on when a deployment event is dispatched. - * - * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might - * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an - * application with debugging enabled. - * - * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. 
- * - * #### Merged branch response - * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating - * a deployment. This auto-merge happens when: - * * Auto-merge option is enabled in the repository - * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example - * * There are no merge conflicts - * - * If there are no new commits in the base branch, a new request to create a deployment should give a successful - * response. - * - * #### Merge conflict response - * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't - * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. - * - * #### Failed commit status checks - * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` - * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. - */ - createDeployment: { - (params?: RestEndpointMethodTypes["repos"]["createDeployment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with `push` access can create deployment statuses for a given deployment. - * - * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. - */ - createDeploymentStatus: { - (params?: RestEndpointMethodTypes["repos"]["createDeploymentStatus"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." - * - * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. - * - * This endpoint requires write access to the repository by providing either: - * - * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. - * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. - * - * This input example shows how you can use the `client_payload` as a test to debug your workflow. - */ - createDispatchEvent: { - (params?: RestEndpointMethodTypes["repos"]["createDispatchEvent"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a new repository for the authenticated user. 
- * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository. - */ - createForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["repos"]["createForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create a fork for the authenticated user. - * - * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com). - */ - createFork: { - (params?: RestEndpointMethodTypes["repos"]["createFork"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository - */ - createInOrg: { - (params?: RestEndpointMethodTypes["repos"]["createInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." - * - * **Note:** Although you can use this operation to specify that only branches that match specified name patterns can deploy to this environment, you must use the UI to set the name patterns. For more information, see "[Environments](/actions/reference/environments#deployment-branches)." - * - * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." - * - * You must authenticate using an access token with the repo scope to use this endpoint. - */ - createOrUpdateEnvironment: { - (params?: RestEndpointMethodTypes["repos"]["createOrUpdateEnvironment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a new file or replaces an existing file in a repository. - */ - createOrUpdateFileContents: { - (params?: RestEndpointMethodTypes["repos"]["createOrUpdateFileContents"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." 
- */ - createPagesSite: { - (params?: RestEndpointMethodTypes["repos"]["createPagesSite"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with push access to the repository can create a release. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - createRelease: { - (params?: RestEndpointMethodTypes["repos"]["createRelease"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. The authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository - */ - createUsingTemplate: { - (params?: RestEndpointMethodTypes["repos"]["createUsingTemplate"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can - * share the same `config` as long as those webhooks do not have any `events` that overlap. - */ - createWebhook: { - (params?: RestEndpointMethodTypes["repos"]["createWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - declineInvitation: { - (params?: RestEndpointMethodTypes["repos"]["declineInvitation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. - * - * If an organization owner has configured the organization to prevent members from deleting organization-owned - * repositories, you will get a `403 Forbidden` response. - */ - delete: { - (params?: RestEndpointMethodTypes["repos"]["delete"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Disables the ability to restrict who can push to this branch. - */ - deleteAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["deleteAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - */ - deleteAdminBranchProtection: { - (params?: RestEndpointMethodTypes["repos"]["deleteAdminBranchProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You must authenticate using an access token with the repo scope to use this endpoint. - */ - deleteAnEnvironment: { - (params?: RestEndpointMethodTypes["repos"]["deleteAnEnvironment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - deleteBranchProtection: { - (params?: RestEndpointMethodTypes["repos"]["deleteBranchProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteCommitComment: { - (params?: RestEndpointMethodTypes["repos"]["deleteCommitComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. - */ - deleteCommitSignatureProtection: { - (params?: RestEndpointMethodTypes["repos"]["deleteCommitSignatureProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. 
- */ - deleteDeployKey: { - (params?: RestEndpointMethodTypes["repos"]["deleteDeployKey"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * To ensure there can always be an active deployment, you can only delete an _inactive_ deployment. Anyone with `repo` or `repo_deployment` scopes can delete an inactive deployment. - * - * To set a deployment as inactive, you must: - * - * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. - * * Mark the active deployment as inactive by adding any non-successful deployment status. - * - * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." - */ - deleteDeployment: { - (params?: RestEndpointMethodTypes["repos"]["deleteDeployment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a file in a repository. - * - * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. - * - * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. - * - * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. - */ - deleteFile: { - (params?: RestEndpointMethodTypes["repos"]["deleteFile"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteInvitation: { - (params?: RestEndpointMethodTypes["repos"]["deleteInvitation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deletePagesSite: { - (params?: RestEndpointMethodTypes["repos"]["deletePagesSite"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - deletePullRequestReviewProtection: { - (params?: RestEndpointMethodTypes["repos"]["deletePullRequestReviewProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with push access to the repository can delete a release. 
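For `deleteFile`, the required `sha` is the blob SHA of the file's current contents; the sketch below (same assumptions and client as above, placeholder path and message) looks it up with `getContent` first.

const { data: file } = await octokit.rest.repos.getContent({
  owner: "octocat",
  repo: "hello-world",
  path: "docs/old.md",          // placeholder path
});
// getContent returns an array for directories; a single file object carries a `sha`.
if (!Array.isArray(file) && "sha" in file) {
  await octokit.rest.repos.deleteFile({
    owner: "octocat",
    repo: "hello-world",
    path: "docs/old.md",
    message: "Remove outdated doc",   // placeholder commit message
    sha: file.sha,
  });
}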
- */ - deleteRelease: { - (params?: RestEndpointMethodTypes["repos"]["deleteRelease"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteReleaseAsset: { - (params?: RestEndpointMethodTypes["repos"]["deleteReleaseAsset"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - deleteWebhook: { - (params?: RestEndpointMethodTypes["repos"]["deleteWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)". - */ - disableAutomatedSecurityFixes: { - (params?: RestEndpointMethodTypes["repos"]["disableAutomatedSecurityFixes"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". - */ - disableVulnerabilityAlerts: { - (params?: RestEndpointMethodTypes["repos"]["disableVulnerabilityAlerts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually - * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use - * the `Location` header to make a second `GET` request. - * **Note**: For private repositories, these links are temporary and expire after five minutes. - * @deprecated octokit.rest.repos.downloadArchive() has been renamed to octokit.rest.repos.downloadZipballArchive() (2020-09-17) - */ - downloadArchive: { - (params?: RestEndpointMethodTypes["repos"]["downloadArchive"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually - * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use - * the `Location` header to make a second `GET` request. - * **Note**: For private repositories, these links are temporary and expire after five minutes. - */ - downloadTarballArchive: { - (params?: RestEndpointMethodTypes["repos"]["downloadTarballArchive"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually - * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use - * the `Location` header to make a second `GET` request. - * **Note**: For private repositories, these links are temporary and expire after five minutes. 
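A hedged sketch for `downloadTarballArchive` (same client as above); as the note explains, the endpoint answers with a redirect, which the Octokit request layer is generally set up to follow, so in this sketch `data` ends up holding the archive bytes.

const archive = await octokit.rest.repos.downloadTarballArchive({
  owner: "octocat",
  repo: "hello-world",
  ref: "main",                 // placeholder ref
});
// archive.data holds the tarball bytes once the redirect has been followed.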
- */ - downloadZipballArchive: { - (params?: RestEndpointMethodTypes["repos"]["downloadZipballArchive"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)". - */ - enableAutomatedSecurityFixes: { - (params?: RestEndpointMethodTypes["repos"]["enableAutomatedSecurityFixes"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". - */ - enableVulnerabilityAlerts: { - (params?: RestEndpointMethodTypes["repos"]["enableVulnerabilityAlerts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * When you pass the `scarlet-witch-preview` media type, requests to get a repository will also return the repository's code of conduct if it can be detected from the repository's code of conduct file. - * - * The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. - */ - get: { - (params?: RestEndpointMethodTypes["repos"]["get"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists who has access to this protected branch. - * - * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. - */ - getAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["getAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - getAdminBranchProtection: { - (params?: RestEndpointMethodTypes["repos"]["getAdminBranchProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get all environments for a repository. - * - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. 
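A sketch for `get`, reusing the client above; `parent` and `source` are only present when the repository is a fork, so the code guards for that.

const { data: repo } = await octokit.rest.repos.get({
  owner: "octocat",
  repo: "hello-world",
});
if (repo.fork && repo.parent) {
  console.log(`Forked from ${repo.parent.full_name}`);
}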
GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getAllEnvironments: { - (params?: RestEndpointMethodTypes["repos"]["getAllEnvironments"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - getAllStatusCheckContexts: { - (params?: RestEndpointMethodTypes["repos"]["getAllStatusCheckContexts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getAllTopics: { - (params?: RestEndpointMethodTypes["repos"]["getAllTopics"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - */ - getAppsWithAccessToProtectedBranch: { - (params?: RestEndpointMethodTypes["repos"]["getAppsWithAccessToProtectedBranch"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getBranch: { - (params?: RestEndpointMethodTypes["repos"]["getBranch"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - getBranchProtection: { - (params?: RestEndpointMethodTypes["repos"]["getBranchProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. - */ - getClones: { - (params?: RestEndpointMethodTypes["repos"]["getClones"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a weekly aggregate of the number of additions and deletions pushed to a repository. 
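A sketch for the traffic endpoint `getClones` (same assumptions as the earlier sketches); `per` selects the day or week breakdown described above.

const { data: clones } = await octokit.rest.repos.getClones({
  owner: "octocat",
  repo: "hello-world",
  per: "week",
});
console.log(`Clones in the last 14 days: ${clones.count} (${clones.uniques} unique)`);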
- */ - getCodeFrequencyStats: { - (params?: RestEndpointMethodTypes["repos"]["getCodeFrequencyStats"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. - */ - getCollaboratorPermissionLevel: { - (params?: RestEndpointMethodTypes["repos"]["getCollaboratorPermissionLevel"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. - * - * The most recent status for each context is returned, up to 100. This field [paginates](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination) if there are over 100 contexts. - * - * Additionally, a combined `state` is returned. The `state` is one of: - * - * * **failure** if any of the contexts report as `error` or `failure` - * * **pending** if there are no statuses or a context is `pending` - * * **success** if the latest status for all contexts is `success` - */ - getCombinedStatusForRef: { - (params?: RestEndpointMethodTypes["repos"]["getCombinedStatusForRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. - * - * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. - * - * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. - * - * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. 
| - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - getCommit: { - (params?: RestEndpointMethodTypes["repos"]["getCommit"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. - */ - getCommitActivityStats: { - (params?: RestEndpointMethodTypes["repos"]["getCommitActivityStats"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getCommitComment: { - (params?: RestEndpointMethodTypes["repos"]["getCommitComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://help.github.com/articles/signing-commits-with-gpg) in GitHub Help. - * - * **Note**: You must enable branch protection to require signed commits. - */ - getCommitSignatureProtection: { - (params?: RestEndpointMethodTypes["repos"]["getCommitSignatureProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint will return all community profile metrics, including an - * overall health score, repository description, the presence of documentation, detected - * code of conduct, detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, - * README, and CONTRIBUTING files. - * - * The `health_percentage` score is defined as a percentage of how many of - * these four documents are present: README, CONTRIBUTING, LICENSE, and - * CODE_OF_CONDUCT. 
For example, if all four documents are present, then - * the `health_percentage` is `100`. If only one is present, then the - * `health_percentage` is `25`. - * - * `content_reports_enabled` is only returned for organization-owned repositories. - */ - getCommunityProfileMetrics: { - (params?: RestEndpointMethodTypes["repos"]["getCommunityProfileMetrics"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit - * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. - * - * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for - * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media - * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent - * object format. - * - * **Note**: - * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). - * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees - * API](https://docs.github.com/rest/reference/git#get-a-tree). - * * This API supports files up to 1 megabyte in size. - * - * #### If the content is a directory - * The response will be an array of objects, one object for each item in the directory. - * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value - * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). - * In the next major version of the API, the type will be returned as "submodule". - * - * #### If the content is a symlink - * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the - * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object - * describing the symlink itself. - * - * #### If the content is a submodule - * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific - * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out - * the submodule at that specific commit. - * - * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the - * github.com URLs (`html_url` and `_links["html"]`) will have null values. - */ - getContent: { - (params?: RestEndpointMethodTypes["repos"]["getContent"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: - * - * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). 
- * * `a` - Number of additions - * * `d` - Number of deletions - * * `c` - Number of commits - */ - getContributorsStats: { - (params?: RestEndpointMethodTypes["repos"]["getContributorsStats"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getDeployKey: { - (params?: RestEndpointMethodTypes["repos"]["getDeployKey"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getDeployment: { - (params?: RestEndpointMethodTypes["repos"]["getDeployment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with pull access can view a deployment status for a deployment: - */ - getDeploymentStatus: { - (params?: RestEndpointMethodTypes["repos"]["getDeploymentStatus"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - getEnvironment: { - (params?: RestEndpointMethodTypes["repos"]["getEnvironment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getLatestPagesBuild: { - (params?: RestEndpointMethodTypes["repos"]["getLatestPagesBuild"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * View the latest published full release for the repository. - * - * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. - */ - getLatestRelease: { - (params?: RestEndpointMethodTypes["repos"]["getLatestRelease"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getPages: { - (params?: RestEndpointMethodTypes["repos"]["getPages"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - getPagesBuild: { - (params?: RestEndpointMethodTypes["repos"]["getPagesBuild"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. - * - * The array order is oldest week (index 0) to most recent week. - */ - getParticipationStats: { - (params?: RestEndpointMethodTypes["repos"]["getParticipationStats"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - getPullRequestReviewProtection: { - (params?: RestEndpointMethodTypes["repos"]["getPullRequestReviewProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Each array contains the day number, hour number, and number of commits: - * - * * `0-6`: Sunday - Saturday - * * `0-23`: Hour of day - * * Number of commits - * - * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. - */ - getPunchCardStats: { - (params?: RestEndpointMethodTypes["repos"]["getPunchCardStats"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the preferred README for a repository. - * - * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. - */ - getReadme: { - (params?: RestEndpointMethodTypes["repos"]["getReadme"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets the README from a repository directory. - * - * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. - */ - getReadmeInDirectory: { - (params?: RestEndpointMethodTypes["repos"]["getReadmeInDirectory"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). - */ - getRelease: { - (params?: RestEndpointMethodTypes["repos"]["getRelease"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. - */ - getReleaseAsset: { - (params?: RestEndpointMethodTypes["repos"]["getReleaseAsset"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get a published release with the specified tag. - */ - getReleaseByTag: { - (params?: RestEndpointMethodTypes["repos"]["getReleaseByTag"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
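A hedged sketch for `getReleaseAsset`, following the note above about fetching the binary content: overriding the `Accept` header requests `application/octet-stream`; the asset id is a placeholder.

const asset = await octokit.rest.repos.getReleaseAsset({
  owner: "octocat",
  repo: "hello-world",
  asset_id: 12345,                                  // placeholder id
  headers: { accept: "application/octet-stream" },
});
// asset.data is the binary payload (or the client follows the 302 redirect to it).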
- */ - getStatusChecksProtection: { - (params?: RestEndpointMethodTypes["repos"]["getStatusChecksProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the teams who have push access to this branch. The list includes child teams. - */ - getTeamsWithAccessToProtectedBranch: { - (params?: RestEndpointMethodTypes["repos"]["getTeamsWithAccessToProtectedBranch"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get the top 10 popular contents over the last 14 days. - */ - getTopPaths: { - (params?: RestEndpointMethodTypes["repos"]["getTopPaths"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get the top 10 referrers over the last 14 days. - */ - getTopReferrers: { - (params?: RestEndpointMethodTypes["repos"]["getTopReferrers"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the people who have push access to this branch. - */ - getUsersWithAccessToProtectedBranch: { - (params?: RestEndpointMethodTypes["repos"]["getUsersWithAccessToProtectedBranch"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. - */ - getViews: { - (params?: RestEndpointMethodTypes["repos"]["getViews"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." - */ - getWebhook: { - (params?: RestEndpointMethodTypes["repos"]["getWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." - * - * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. 
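A sketch for `getViews` (same client as above), mirroring the traffic description: totals plus a per-day breakdown for the last 14 days.

const { data: views } = await octokit.rest.repos.getViews({
  owner: "octocat",
  repo: "hello-world",
  per: "day",
});
console.log(`${views.count} views, ${views.uniques} unique visitors`);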
- */ - getWebhookConfigForRepo: { - (params?: RestEndpointMethodTypes["repos"]["getWebhookConfigForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listBranches: { - (params?: RestEndpointMethodTypes["repos"]["listBranches"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. - */ - listBranchesForHeadCommit: { - (params?: RestEndpointMethodTypes["repos"]["listBranchesForHeadCommit"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. - * - * Team members will include the members of child teams. - */ - listCollaborators: { - (params?: RestEndpointMethodTypes["repos"]["listCollaborators"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Use the `:commit_sha` to specify the commit that will have its comments listed. - */ - listCommentsForCommit: { - (params?: RestEndpointMethodTypes["repos"]["listCommentsForCommit"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). - * - * Comments are ordered by ascending ID. - */ - listCommitCommentsForRepo: { - (params?: RestEndpointMethodTypes["repos"]["listCommitCommentsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. - * - * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. - */ - listCommitStatusesForRef: { - (params?: RestEndpointMethodTypes["repos"]["listCommitStatusesForRef"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - listCommits: { - (params?: RestEndpointMethodTypes["repos"]["listCommits"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. - * - * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. 
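A sketch for `listCommits`; it assumes the client also exposes `octokit.paginate` (the one returned by `getOctokit` does), so all pages of results can be collected in one call.

// Collect every commit on the default branch (placeholders as before).
const commits = await octokit.paginate(octokit.rest.repos.listCommits, {
  owner: "octocat",
  repo: "hello-world",
  per_page: 100,
});
console.log(`Fetched ${commits.length} commits`);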
- */ - listContributors: { - (params?: RestEndpointMethodTypes["repos"]["listContributors"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listDeployKeys: { - (params?: RestEndpointMethodTypes["repos"]["listDeployKeys"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with pull access can view deployment statuses for a deployment: - */ - listDeploymentStatuses: { - (params?: RestEndpointMethodTypes["repos"]["listDeploymentStatuses"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Simple filtering of deployments is available via query parameters: - */ - listDeployments: { - (params?: RestEndpointMethodTypes["repos"]["listDeployments"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - */ - listForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["repos"]["listForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists repositories for the specified organization. - */ - listForOrg: { - (params?: RestEndpointMethodTypes["repos"]["listForOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. - */ - listForUser: { - (params?: RestEndpointMethodTypes["repos"]["listForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listForks: { - (params?: RestEndpointMethodTypes["repos"]["listForks"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. - */ - listInvitations: { - (params?: RestEndpointMethodTypes["repos"]["listInvitations"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * When authenticating as a user, this endpoint will list all currently open repository invitations for that user. - */ - listInvitationsForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["repos"]["listInvitationsForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. 
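A sketch for `listForOrg` with the same client; `type` narrows the listing and the organization name is a placeholder.

const { data: orgRepos } = await octokit.rest.repos.listForOrg({
  org: "my-org",            // placeholder organization
  type: "public",
  per_page: 50,
});
for (const r of orgRepos) {
  console.log(r.full_name);
}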
- */ - listLanguages: { - (params?: RestEndpointMethodTypes["repos"]["listLanguages"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listPagesBuilds: { - (params?: RestEndpointMethodTypes["repos"]["listPagesBuilds"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all public repositories in the order that they were created. - * - * Notes: - * - For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. - * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. - */ - listPublic: { - (params?: RestEndpointMethodTypes["repos"]["listPublic"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. Additional preview headers may be required to see certain details for associated pull requests, such as whether a pull request is in a draft state. For more information about previews that might affect this endpoint, see the [List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests) endpoint. - */ - listPullRequestsAssociatedWithCommit: { - (params?: RestEndpointMethodTypes["repos"]["listPullRequestsAssociatedWithCommit"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listReleaseAssets: { - (params?: RestEndpointMethodTypes["repos"]["listReleaseAssets"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). - * - * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases. 
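A sketch for `listPullRequestsAssociatedWithCommit` (client as above); the commit SHA is a placeholder.

const { data: prs } = await octokit.rest.repos.listPullRequestsAssociatedWithCommit({
  owner: "octocat",
  repo: "hello-world",
  commit_sha: "0123456789abcdef0123456789abcdef01234567",   // placeholder SHA
});
console.log(prs.map((pr) => pr.number));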
- */ - listReleases: { - (params?: RestEndpointMethodTypes["repos"]["listReleases"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listTags: { - (params?: RestEndpointMethodTypes["repos"]["listTags"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listTeams: { - (params?: RestEndpointMethodTypes["repos"]["listTeams"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - listWebhooks: { - (params?: RestEndpointMethodTypes["repos"]["listWebhooks"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - merge: { - (params?: RestEndpointMethodTypes["repos"]["merge"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. - */ - pingWebhook: { - (params?: RestEndpointMethodTypes["repos"]["pingWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - removeAppAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["removeAppAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - removeCollaborator: { - (params?: RestEndpointMethodTypes["repos"]["removeCollaborator"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
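A sketch for `listReleases` with the same client; per the note above, draft releases only appear when the token has push access.

const { data: releases } = await octokit.rest.repos.listReleases({
  owner: "octocat",
  repo: "hello-world",
  per_page: 10,
});
for (const rel of releases) {
  console.log(`${rel.tag_name} draft=${rel.draft}`);
}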
- */ - removeStatusCheckContexts: { - (params?: RestEndpointMethodTypes["repos"]["removeStatusCheckContexts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - removeStatusCheckProtection: { - (params?: RestEndpointMethodTypes["repos"]["removeStatusCheckProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of a team to push to this branch. You can also remove push access for child teams. - * - * | Type | Description | - * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - removeTeamAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["removeTeamAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of a user to push to this branch. - * - * | Type | Description | - * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - removeUserAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["removeUserAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Renames a branch in a repository. - * - * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". - * - * The permissions required to use this endpoint depends on whether you are renaming the default branch. 
- * - * To rename a non-default branch: - * - * * Users must have push access. - * * GitHub Apps must have the `contents:write` repository permission. - * - * To rename the default branch: - * - * * Users must have admin or owner permissions. - * * GitHub Apps must have the `administration:write` repository permission. - */ - renameBranch: { - (params?: RestEndpointMethodTypes["repos"]["renameBranch"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - replaceAllTopics: { - (params?: RestEndpointMethodTypes["repos"]["replaceAllTopics"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. - * - * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. - */ - requestPagesBuild: { - (params?: RestEndpointMethodTypes["repos"]["requestPagesBuild"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - */ - setAdminBranchProtection: { - (params?: RestEndpointMethodTypes["repos"]["setAdminBranchProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| - */ - setAppAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["setAppAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - */ - setStatusCheckContexts: { - (params?: RestEndpointMethodTypes["repos"]["setStatusCheckContexts"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. - * - * | Type | Description | - * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | - * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - setTeamAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["setTeamAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. - * - * | Type | Description | - * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - setUserAccessRestrictions: { - (params?: RestEndpointMethodTypes["repos"]["setUserAccessRestrictions"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. 
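A hedged sketch for `setStatusCheckContexts` (same client); it assumes branch protection with required status checks is already enabled on the branch, and the context names are placeholders.

await octokit.rest.repos.setStatusCheckContexts({
  owner: "octocat",
  repo: "hello-world",
  branch: "main",
  contexts: ["ci/build", "ci/test"],   // placeholder check contexts
});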
- * - * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` - */ - testPushWebhook: { - (params?: RestEndpointMethodTypes["repos"]["testPushWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://help.github.com/articles/about-repository-transfers/). - */ - transfer: { - (params?: RestEndpointMethodTypes["repos"]["transfer"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. - */ - update: { - (params?: RestEndpointMethodTypes["repos"]["update"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Protecting a branch requires admin or owner permissions to the repository. - * - * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. - * - * **Note**: The list of users, apps, and teams in total is limited to 100 items. - */ - updateBranchProtection: { - (params?: RestEndpointMethodTypes["repos"]["updateBranchProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateCommitComment: { - (params?: RestEndpointMethodTypes["repos"]["updateCommitComment"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). - */ - updateInformationAboutPagesSite: { - (params?: RestEndpointMethodTypes["repos"]["updateInformationAboutPagesSite"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - updateInvitation: { - (params?: RestEndpointMethodTypes["repos"]["updateInvitation"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. 
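A minimal sketch of `updateBranchProtection` as documented above, assuming the same `getOctokit` setup; passing `null` for a sub-setting leaves it disabled, and the values shown are placeholders.

```ts
import { getOctokit } from "@actions/github";

async function protectMain(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Minimal protection: enforce the rules for admins, leave the other sub-settings off.
  await octokit.rest.repos.updateBranchProtection({
    owner: "my-org",                      // placeholder
    repo: "my-repo",                      // placeholder
    branch: "main",
    required_status_checks: null,
    enforce_admins: true,
    required_pull_request_reviews: null,
    restrictions: null,                   // passing users/teams/apps here replaces previous values
  });
}

protectMain().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```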
- * - * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. - */ - updatePullRequestReviewProtection: { - (params?: RestEndpointMethodTypes["repos"]["updatePullRequestReviewProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with push access to the repository can edit a release. - */ - updateRelease: { - (params?: RestEndpointMethodTypes["repos"]["updateRelease"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Users with push access to the repository can edit a release asset. - */ - updateReleaseAsset: { - (params?: RestEndpointMethodTypes["repos"]["updateReleaseAsset"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. - * @deprecated octokit.rest.repos.updateStatusCheckPotection() has been renamed to octokit.rest.repos.updateStatusCheckProtection() (2020-09-17) - */ - updateStatusCheckPotection: { - (params?: RestEndpointMethodTypes["repos"]["updateStatusCheckPotection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. - */ - updateStatusCheckProtection: { - (params?: RestEndpointMethodTypes["repos"]["updateStatusCheckProtection"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." - */ - updateWebhook: { - (params?: RestEndpointMethodTypes["repos"]["updateWebhook"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." 
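A short sketch of `updateWebhook`, which the comment above distinguishes from the config-only `updateWebhookConfigForRepo`; the hook id and repository names are placeholders.

```ts
import { getOctokit } from "@actions/github";

async function pauseWebhook(hookId: number): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Deactivate a repository webhook without touching its config object or secret.
  await octokit.rest.repos.updateWebhook({
    owner: "my-org",   // placeholder
    repo: "my-repo",   // placeholder
    hook_id: hookId,
    active: false,
  });
}

pauseWebhook(123456).catch((error) => {   // 123456 is a placeholder hook id
  console.error(error);
  process.exitCode = 1;
});
```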
- * - * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. - */ - updateWebhookConfigForRepo: { - (params?: RestEndpointMethodTypes["repos"]["updateWebhookConfigForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in - * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. - * - * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. - * - * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: - * - * `application/zip` - * - * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, - * you'll still need to pass your authentication to be able to upload an asset. - * - * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. - * - * **Notes:** - * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" - * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact). - * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. - */ - uploadReleaseAsset: { - (params?: RestEndpointMethodTypes["repos"]["uploadReleaseAsset"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - search: { - /** - * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: - * - * `q=addClass+in:file+language:js+repo:jquery/jquery` - * - * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. 
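A sketch of `uploadReleaseAsset` as described above, assuming `getOctokit` and a placeholder release id. A text asset is used so the `data` parameter can stay a plain string; binary assets are typically passed as a `Buffer` with an explicit content type.

```ts
import { readFileSync } from "fs";
import { getOctokit } from "@actions/github";

async function attachNotes(releaseId: number): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Raw asset content goes in the request body; octokit resolves the upload URL for us.
  const data = readFileSync("RELEASE_NOTES.md", "utf8");

  await octokit.rest.repos.uploadReleaseAsset({
    owner: "my-org",                 // placeholder
    repo: "my-repo",                 // placeholder
    release_id: releaseId,
    name: "RELEASE_NOTES.md",
    data,
    headers: { "content-type": "text/markdown" },
  });
}

attachNotes(42).catch((error) => {   // 42 is a placeholder release id
  console.error(error);
  process.exitCode = 1;
});
```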
- * - * #### Considerations for code search - * - * Due to the complexity of searching code, there are a few restrictions on how searches are performed: - * - * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. - * * Only files smaller than 384 KB are searchable. - * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing - * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. - */ - code: { - (params?: RestEndpointMethodTypes["search"]["code"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match - * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: - * - * `q=repo:octocat/Spoon-Knife+css` - */ - commits: { - (params?: RestEndpointMethodTypes["search"]["commits"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted - * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. - * - * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` - * - * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. - * - * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." 
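A sketch exercising the code and issue searches described above, using the example queries from those comments; it assumes `getOctokit` and a `GITHUB_TOKEN` environment variable.

```ts
import { getOctokit } from "@actions/github";

async function searchExamples(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Code search: the addClass query from the comment above (default branch only).
  const code = await octokit.rest.search.code({
    q: "addClass in:file language:js repo:jquery/jquery",
    per_page: 10,
  });
  console.log(`code hits: ${code.data.total_count}`);

  // Issue search: oldest open Python bugs mentioning "windows"; is:issue keeps the
  // query valid for user-to-server GitHub App requests, per the note above.
  const issues = await octokit.rest.search.issuesAndPullRequests({
    q: "windows label:bug language:python state:open is:issue",
    sort: "created",
    order: "asc",
  });
  console.log(`issue hits: ${issues.data.total_count}`);
}

searchExamples().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```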
- */ - issuesAndPullRequests: { - (params?: RestEndpointMethodTypes["search"]["issuesAndPullRequests"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: - * - * `q=bug+defect+enhancement&repository_id=64778136` - * - * The labels that best match the query appear first in the search results. - */ - labels: { - (params?: RestEndpointMethodTypes["search"]["labels"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: - * - * `q=tetris+language:assembly&sort=stars&order=desc` - * - * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. - * - * When you include the `mercy` preview header, you can also search for multiple topics by adding more `topic:` instances. For example, your query might look like this: - * - * `q=topic:ruby+topic:rails` - */ - repos: { - (params?: RestEndpointMethodTypes["search"]["repos"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://help.github.com/articles/searching-topics/)" for a detailed list of qualifiers. - * - * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. 
Your query might look like this: - * - * `q=ruby+is:featured` - * - * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. - */ - topics: { - (params?: RestEndpointMethodTypes["search"]["topics"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for users, you can get text match metadata for the issue **login**, **email**, and **name** fields when you pass the `text-match` media type. For more details about highlighting search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you're looking for a list of popular users, you might try this query: - * - * `q=tom+repos:%3E42+followers:%3E1000` - * - * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. - */ - users: { - (params?: RestEndpointMethodTypes["search"]["users"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - secretScanning: { - /** - * Gets a single secret scanning alert detected in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. - */ - getAlert: { - (params?: RestEndpointMethodTypes["secretScanning"]["getAlert"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all secret scanning alerts for a private repository, from newest to oldest. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. - */ - listAlertsForRepo: { - (params?: RestEndpointMethodTypes["secretScanning"]["listAlertsForRepo"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Updates the status of a secret scanning alert in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. - */ - updateAlert: { - (params?: RestEndpointMethodTypes["secretScanning"]["updateAlert"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - teams: { - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. - * - * If the user is already a member of the team, this endpoint will update the role of the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. - */ - addOrUpdateMembershipForUserInOrg: { - (params?: RestEndpointMethodTypes["teams"]["addOrUpdateMembershipForUserInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - addOrUpdateProjectPermissionsInOrg: { - (params?: RestEndpointMethodTypes["teams"]["addOrUpdateProjectPermissionsInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. 
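A sketch of `addOrUpdateMembershipForUserInOrg` as documented above; org and team slug are placeholders, and the response fields shown (`state`, `role`) follow the membership object described in the comment.

```ts
import { getOctokit } from "@actions/github";

async function addMaintainer(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Invite (or update) a user on a team; non-members get an email invitation
  // and stay "pending" until they accept.
  const { data: membership } = await octokit.rest.teams.addOrUpdateMembershipForUserInOrg({
    org: "my-org",          // placeholder
    team_slug: "my-team",   // placeholder
    username: "octocat",
    role: "maintainer",
  });
  console.log(`state: ${membership.state}, role: ${membership.role}`);
}

addMaintainer().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```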
- * - * For more information about the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". - */ - addOrUpdateRepoPermissionsInOrg: { - (params?: RestEndpointMethodTypes["teams"]["addOrUpdateRepoPermissionsInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - checkPermissionsForProjectInOrg: { - (params?: RestEndpointMethodTypes["teams"]["checkPermissionsForProjectInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. - * - * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. - * - * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - */ - checkPermissionsForRepoInOrg: { - (params?: RestEndpointMethodTypes["teams"]["checkPermissionsForRepoInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://help.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." - * - * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". - */ - create: { - (params?: RestEndpointMethodTypes["teams"]["create"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. 
See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. - */ - createDiscussionCommentInOrg: { - (params?: RestEndpointMethodTypes["teams"]["createDiscussionCommentInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. - */ - createDiscussionInOrg: { - (params?: RestEndpointMethodTypes["teams"]["createDiscussionInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - deleteDiscussionCommentInOrg: { - (params?: RestEndpointMethodTypes["teams"]["deleteDiscussionCommentInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. - */ - deleteDiscussionInOrg: { - (params?: RestEndpointMethodTypes["teams"]["deleteDiscussionInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * To delete a team, the authenticated user must be an organization owner or team maintainer. - * - * If you are an organization owner, deleting a parent team will delete all of its child teams as well. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. 
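A sketch of the discussion-creation methods documented above: create a team discussion, then comment on it. Token handling and the org/team values are placeholder assumptions.

```ts
import { getOctokit } from "@actions/github";

async function postDiscussion(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Create a discussion on the team's page (this triggers notifications, so avoid rapid-fire calls).
  const { data: discussion } = await octokit.rest.teams.createDiscussionInOrg({
    org: "my-org",          // placeholder
    team_slug: "my-team",   // placeholder
    title: "Release checklist",
    body: "Please review before Friday.",
  });

  // Add a comment to the discussion that was just created.
  await octokit.rest.teams.createDiscussionCommentInOrg({
    org: "my-org",
    team_slug: "my-team",
    discussion_number: discussion.number,
    body: "Docs section is done.",
  });
}

postDiscussion().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```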
- */ - deleteInOrg: { - (params?: RestEndpointMethodTypes["teams"]["deleteInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. - */ - getByName: { - (params?: RestEndpointMethodTypes["teams"]["getByName"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - getDiscussionCommentInOrg: { - (params?: RestEndpointMethodTypes["teams"]["getDiscussionCommentInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. - */ - getDiscussionInOrg: { - (params?: RestEndpointMethodTypes["teams"]["getDiscussionInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Team members will include the members of child teams. - * - * To get a user's membership with a team, the team must be visible to the authenticated user. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. - * - * **Note:** - * The response contains the `state` of the membership and the member's `role`. - * - * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). - */ - getMembershipForUserInOrg: { - (params?: RestEndpointMethodTypes["teams"]["getMembershipForUserInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all teams in an organization that are visible to the authenticated user. - */ - list: { - (params?: RestEndpointMethodTypes["teams"]["list"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the child teams of the team specified by `{team_slug}`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. - */ - listChildInOrg: { - (params?: RestEndpointMethodTypes["teams"]["listChildInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all comments on a team discussion. 
OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. - */ - listDiscussionCommentsInOrg: { - (params?: RestEndpointMethodTypes["teams"]["listDiscussionCommentsInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. - */ - listDiscussionsInOrg: { - (params?: RestEndpointMethodTypes["teams"]["listDiscussionsInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). - */ - listForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["teams"]["listForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Team members will include the members of child teams. - * - * To list members in a team, the team must be visible to the authenticated user. - */ - listMembersInOrg: { - (params?: RestEndpointMethodTypes["teams"]["listMembersInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. - */ - listPendingInvitationsInOrg: { - (params?: RestEndpointMethodTypes["teams"]["listPendingInvitationsInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the organization projects for a team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. - */ - listProjectsInOrg: { - (params?: RestEndpointMethodTypes["teams"]["listProjectsInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists a team's repositories visible to the authenticated user. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. 
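A sketch of one of the team listing methods above combined with pagination, assuming the pagination plugin that `@actions/github` composes into its Octokit instance; org and team slug are placeholders.

```ts
import { getOctokit } from "@actions/github";

async function listTeamMembers(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Collect every member (including child-team members) across all pages.
  const members = await octokit.paginate(octokit.rest.teams.listMembersInOrg, {
    org: "my-org",          // placeholder
    team_slug: "my-team",   // placeholder
    per_page: 100,
  });
  console.log(members.map((member) => member?.login).join(", "));
}

listTeamMembers().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```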
- */ - listReposInOrg: { - (params?: RestEndpointMethodTypes["teams"]["listReposInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. - */ - removeMembershipForUserInOrg: { - (params?: RestEndpointMethodTypes["teams"]["removeMembershipForUserInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - removeProjectInOrg: { - (params?: RestEndpointMethodTypes["teams"]["removeProjectInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - */ - removeRepoInOrg: { - (params?: RestEndpointMethodTypes["teams"]["removeRepoInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
- * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - updateDiscussionCommentInOrg: { - (params?: RestEndpointMethodTypes["teams"]["updateDiscussionCommentInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. - */ - updateDiscussionInOrg: { - (params?: RestEndpointMethodTypes["teams"]["updateDiscussionInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * To edit a team, the authenticated user must either be an organization owner or a team maintainer. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. - */ - updateInOrg: { - (params?: RestEndpointMethodTypes["teams"]["updateInOrg"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; - users: { - /** - * This endpoint is accessible with the `user` scope. - */ - addEmailForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["addEmailForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - block: { - (params?: RestEndpointMethodTypes["users"]["block"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - checkBlocked: { - (params?: RestEndpointMethodTypes["users"]["checkBlocked"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - checkFollowingForUser: { - (params?: RestEndpointMethodTypes["users"]["checkFollowingForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - checkPersonIsFollowedByAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["checkPersonIsFollowedByAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - createGpgKeyForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["createGpgKeyForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
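A sketch of `createPublicSshKeyForAuthenticated` as described above. The token here is assumed to be a personal access token with the `write:public_key` scope (the default workflow `GITHUB_TOKEN` does not cover user-scoped endpoints), and the key material is a placeholder.

```ts
import { getOctokit } from "@actions/github";

async function registerKey(): Promise<void> {
  // USER_PAT is an assumed environment variable holding a user-scoped token.
  const octokit = getOctokit(process.env.USER_PAT ?? "");

  await octokit.rest.users.createPublicSshKeyForAuthenticated({
    title: "build-machine",                       // placeholder label
    key: "ssh-ed25519 AAAAC3... user@example",    // placeholder public key
  });
}

registerKey().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```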
- */ - createPublicSshKeyForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["createPublicSshKeyForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * This endpoint is accessible with the `user` scope. - */ - deleteEmailForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["deleteEmailForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - deleteGpgKeyForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["deleteGpgKeyForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - deletePublicSshKeyForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["deletePublicSshKeyForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. - */ - follow: { - (params?: RestEndpointMethodTypes["users"]["follow"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. - * - * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. - */ - getAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["getAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Provides publicly available information about someone with a GitHub account. - * - * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below" - * - * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. 
If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). - * - * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". - */ - getByUsername: { - (params?: RestEndpointMethodTypes["users"]["getByUsername"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. - * - * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: - * - * ```shell - * curl -u username:token - * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 - * ``` - */ - getContextForUser: { - (params?: RestEndpointMethodTypes["users"]["getContextForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - getGpgKeyForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["getGpgKeyForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - getPublicSshKeyForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["getPublicSshKeyForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. - * - * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. - */ - list: { - (params?: RestEndpointMethodTypes["users"]["list"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * List the users you've blocked on your personal account. - */ - listBlockedByAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["listBlockedByAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists all of your email addresses, and specifies which one is visible to the public. 
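A sketch of the hovercard lookup documented above, making the same request as the cURL example (octocat in the context of repository `1300192`); `GITHUB_TOKEN` and `getOctokit` are assumed.

```ts
import { getOctokit } from "@actions/github";

async function hovercard(): Promise<void> {
  const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

  // Same request as the cURL example: subject_type/subject_id add repository context.
  const { data } = await octokit.rest.users.getContextForUser({
    username: "octocat",
    subject_type: "repository",
    subject_id: "1300192",
  });
  for (const context of data.contexts) {
    console.log(`${context.octicon}: ${context.message}`);
  }
}

hovercard().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```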
This endpoint is accessible with the `user:email` scope. - */ - listEmailsForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["listEmailsForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the people who the authenticated user follows. - */ - listFollowedByAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["listFollowedByAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the people following the authenticated user. - */ - listFollowersForAuthenticatedUser: { - (params?: RestEndpointMethodTypes["users"]["listFollowersForAuthenticatedUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the people following the specified user. - */ - listFollowersForUser: { - (params?: RestEndpointMethodTypes["users"]["listFollowersForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the people who the specified user follows. - */ - listFollowingForUser: { - (params?: RestEndpointMethodTypes["users"]["listFollowingForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - listGpgKeysForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["listGpgKeysForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the GPG keys for a user. This information is accessible by anyone. - */ - listGpgKeysForUser: { - (params?: RestEndpointMethodTypes["users"]["listGpgKeysForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. - */ - listPublicEmailsForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["listPublicEmailsForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the _verified_ public SSH keys for a user. This is accessible by anyone. - */ - listPublicKeysForUser: { - (params?: RestEndpointMethodTypes["users"]["listPublicKeysForUser"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
- */ - listPublicSshKeysForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["listPublicSshKeysForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Sets the visibility for your primary email addresses. - */ - setPrimaryEmailVisibilityForAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["setPrimaryEmailVisibilityForAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - unblock: { - (params?: RestEndpointMethodTypes["users"]["unblock"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. - */ - unfollow: { - (params?: RestEndpointMethodTypes["users"]["unfollow"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - /** - * **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. - */ - updateAuthenticated: { - (params?: RestEndpointMethodTypes["users"]["updateAuthenticated"]["parameters"]): Promise; - defaults: RequestInterface["defaults"]; - endpoint: EndpointInterface<{ - url: string; - }>; - }; - }; -}; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts deleted file mode 100644 index 44a2ec79..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts +++ /dev/null @@ -1,2629 +0,0 @@ -import { Endpoints, RequestParameters } from "@octokit/types"; -export declare type RestEndpointMethodTypes = { - actions: { - addSelectedRepoToOrgSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"]["response"]; - }; - cancelWorkflowRun: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"]["response"]; - }; - createOrUpdateEnvironmentSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; - }; - createOrUpdateOrgSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}"]["response"]; - }; - createOrUpdateRepoSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; - }; - createRegistrationTokenForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/actions/runners/registration-token"]["response"]; - }; - createRegistrationTokenForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/registration-token"]["response"]; - }; - createRemoveTokenForOrg: { - parameters: RequestParameters & Omit; - 
response: Endpoints["POST /orgs/{org}/actions/runners/remove-token"]["response"]; - }; - createRemoveTokenForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/remove-token"]["response"]; - }; - createWorkflowDispatch: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"]["response"]; - }; - deleteArtifact: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"]["response"]; - }; - deleteEnvironmentSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; - }; - deleteOrgSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/actions/secrets/{secret_name}"]["response"]; - }; - deleteRepoSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; - }; - deleteSelfHostedRunnerFromOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/actions/runners/{runner_id}"]["response"]; - }; - deleteSelfHostedRunnerFromRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"]["response"]; - }; - deleteWorkflowRun: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"]["response"]; - }; - deleteWorkflowRunLogs: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"]["response"]; - }; - disableSelectedRepositoryGithubActionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"]["response"]; - }; - disableWorkflow: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"]["response"]; - }; - downloadArtifact: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"]["response"]; - }; - downloadJobLogsForWorkflowRun: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"]["response"]; - }; - downloadWorkflowRunLogs: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"]["response"]; - }; - enableSelectedRepositoryGithubActionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"]["response"]; - }; - enableWorkflow: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"]["response"]; - }; - getAllowedActionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/permissions/selected-actions"]["response"]; - }; - getAllowedActionsRepository: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"]["response"]; - }; - getArtifact: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"]["response"]; - 
}; - getEnvironmentPublicKey: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"]["response"]; - }; - getEnvironmentSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; - }; - getGithubActionsPermissionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/permissions"]["response"]; - }; - getGithubActionsPermissionsRepository: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions"]["response"]; - }; - getJobForWorkflowRun: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"]["response"]; - }; - getOrgPublicKey: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/secrets/public-key"]["response"]; - }; - getOrgSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}"]["response"]; - }; - getPendingDeploymentsForRun: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"]["response"]; - }; - getRepoPermissions: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions"]["response"]; - }; - getRepoPublicKey: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets/public-key"]["response"]; - }; - getRepoSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; - }; - getReviewsForRun: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"]["response"]; - }; - getSelfHostedRunnerForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/runners/{runner_id}"]["response"]; - }; - getSelfHostedRunnerForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"]["response"]; - }; - getWorkflow: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"]["response"]; - }; - getWorkflowRun: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}"]["response"]; - }; - getWorkflowRunUsage: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"]["response"]; - }; - getWorkflowUsage: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"]["response"]; - }; - listArtifactsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["response"]; - }; - listEnvironmentSecrets: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["response"]; - }; - listJobsForWorkflowRun: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["response"]; - }; - listOrgSecrets: { - parameters: RequestParameters & Omit; - response: 
Endpoints["GET /orgs/{org}/actions/secrets"]["response"]; - }; - listRepoSecrets: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["response"]; - }; - listRepoWorkflows: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["response"]; - }; - listRunnerApplicationsForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/runners/downloads"]["response"]; - }; - listRunnerApplicationsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/downloads"]["response"]; - }; - listSelectedReposForOrgSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]; - }; - listSelectedRepositoriesEnabledGithubActionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["response"]; - }; - listSelfHostedRunnersForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/actions/runners"]["response"]; - }; - listSelfHostedRunnersForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["response"]; - }; - listWorkflowRunArtifacts: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["response"]; - }; - listWorkflowRuns: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["response"]; - }; - listWorkflowRunsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["response"]; - }; - reRunWorkflow: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"]["response"]; - }; - removeSelectedRepoFromOrgSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"]["response"]; - }; - reviewPendingDeploymentsForRun: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"]["response"]; - }; - setAllowedActionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/actions/permissions/selected-actions"]["response"]; - }; - setAllowedActionsRepository: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"]["response"]; - }; - setGithubActionsPermissionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/actions/permissions"]["response"]; - }; - setGithubActionsPermissionsRepository: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions"]["response"]; - }; - setSelectedReposForOrgSecret: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]; - }; - setSelectedRepositoriesEnabledGithubActionsOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/actions/permissions/repositories"]["response"]; - }; - }; - activity: { - checkRepoIsStarredByAuthenticatedUser: { - parameters: RequestParameters & 
Omit; - response: Endpoints["GET /user/starred/{owner}/{repo}"]["response"]; - }; - deleteRepoSubscription: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/subscription"]["response"]; - }; - deleteThreadSubscription: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /notifications/threads/{thread_id}/subscription"]["response"]; - }; - getFeeds: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /feeds"]["response"]; - }; - getRepoSubscription: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/subscription"]["response"]; - }; - getThread: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /notifications/threads/{thread_id}"]["response"]; - }; - getThreadSubscriptionForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /notifications/threads/{thread_id}/subscription"]["response"]; - }; - listEventsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/events"]["response"]; - }; - listNotificationsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /notifications"]["response"]; - }; - listOrgEventsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/events/orgs/{org}"]["response"]; - }; - listPublicEvents: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /events"]["response"]; - }; - listPublicEventsForRepoNetwork: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /networks/{owner}/{repo}/events"]["response"]; - }; - listPublicEventsForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/events/public"]["response"]; - }; - listPublicOrgEvents: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/events"]["response"]; - }; - listReceivedEventsForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/received_events"]["response"]; - }; - listReceivedPublicEventsForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/received_events/public"]["response"]; - }; - listRepoEvents: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/events"]["response"]; - }; - listRepoNotificationsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/notifications"]["response"]; - }; - listReposStarredByAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/starred"]["response"]; - }; - listReposStarredByUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/starred"]["response"]; - }; - listReposWatchedByUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/subscriptions"]["response"]; - }; - listStargazersForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/stargazers"]["response"]; - }; - listWatchedReposForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/subscriptions"]["response"]; - }; - listWatchersForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["response"]; - }; - markNotificationsAsRead: { - parameters: RequestParameters & Omit; - 
response: Endpoints["PUT /notifications"]["response"]; - }; - markRepoNotificationsAsRead: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/notifications"]["response"]; - }; - markThreadAsRead: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /notifications/threads/{thread_id}"]["response"]; - }; - setRepoSubscription: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/subscription"]["response"]; - }; - setThreadSubscription: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /notifications/threads/{thread_id}/subscription"]["response"]; - }; - starRepoForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /user/starred/{owner}/{repo}"]["response"]; - }; - unstarRepoForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/starred/{owner}/{repo}"]["response"]; - }; - }; - apps: { - addRepoToInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; - }; - checkToken: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /applications/{client_id}/token"]["response"]; - }; - createContentAttachment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /content_references/{content_reference_id}/attachments"]["response"]; - }; - createFromManifest: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /app-manifests/{code}/conversions"]["response"]; - }; - createInstallationAccessToken: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /app/installations/{installation_id}/access_tokens"]["response"]; - }; - deleteAuthorization: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /applications/{client_id}/grant"]["response"]; - }; - deleteInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /app/installations/{installation_id}"]["response"]; - }; - deleteToken: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /applications/{client_id}/token"]["response"]; - }; - getAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /app"]["response"]; - }; - getBySlug: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /apps/{app_slug}"]["response"]; - }; - getInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /app/installations/{installation_id}"]["response"]; - }; - getOrgInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/installation"]["response"]; - }; - getRepoInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/installation"]["response"]; - }; - getSubscriptionPlanForAccount: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /marketplace_listing/accounts/{account_id}"]["response"]; - }; - getSubscriptionPlanForAccountStubbed: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /marketplace_listing/stubbed/accounts/{account_id}"]["response"]; - }; - getUserInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/installation"]["response"]; - }; - getWebhookConfigForApp: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /app/hook/config"]["response"]; - }; - listAccountsForPlan: { - 
parameters: RequestParameters & Omit; - response: Endpoints["GET /marketplace_listing/plans/{plan_id}/accounts"]["response"]; - }; - listAccountsForPlanStubbed: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"]["response"]; - }; - listInstallationReposForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/installations/{installation_id}/repositories"]["response"]; - }; - listInstallations: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /app/installations"]["response"]; - }; - listInstallationsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/installations"]["response"]; - }; - listPlans: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /marketplace_listing/plans"]["response"]; - }; - listPlansStubbed: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /marketplace_listing/stubbed/plans"]["response"]; - }; - listReposAccessibleToInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /installation/repositories"]["response"]; - }; - listSubscriptionsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/marketplace_purchases"]["response"]; - }; - listSubscriptionsForAuthenticatedUserStubbed: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/marketplace_purchases/stubbed"]["response"]; - }; - removeRepoFromInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; - }; - resetToken: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /applications/{client_id}/token"]["response"]; - }; - revokeInstallationAccessToken: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /installation/token"]["response"]; - }; - scopeToken: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /applications/{client_id}/token/scoped"]["response"]; - }; - suspendInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /app/installations/{installation_id}/suspended"]["response"]; - }; - unsuspendInstallation: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /app/installations/{installation_id}/suspended"]["response"]; - }; - updateWebhookConfigForApp: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /app/hook/config"]["response"]; - }; - }; - billing: { - getGithubActionsBillingOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/settings/billing/actions"]["response"]; - }; - getGithubActionsBillingUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/settings/billing/actions"]["response"]; - }; - getGithubPackagesBillingOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/settings/billing/packages"]["response"]; - }; - getGithubPackagesBillingUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/settings/billing/packages"]["response"]; - }; - getSharedStorageBillingOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/settings/billing/shared-storage"]["response"]; - }; - getSharedStorageBillingUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET 
/users/{username}/settings/billing/shared-storage"]["response"]; - }; - }; - checks: { - create: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/check-runs"]["response"]; - }; - createSuite: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/check-suites"]["response"]; - }; - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"]["response"]; - }; - getSuite: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"]["response"]; - }; - listAnnotations: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"]["response"]; - }; - listForRef: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["response"]; - }; - listForSuite: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["response"]; - }; - listSuitesForRef: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["response"]; - }; - rerequestSuite: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"]["response"]; - }; - setSuitesPreferences: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/check-suites/preferences"]["response"]; - }; - update: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]["response"]; - }; - }; - codeScanning: { - deleteAnalysis: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"]["response"]; - }; - getAlert: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"]["response"]; - }; - getAnalysis: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"]["response"]; - }; - getSarif: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"]["response"]; - }; - listAlertsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["response"]; - }; - listAlertsInstances: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; - }; - listRecentAnalyses: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses"]["response"]; - }; - updateAlert: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"]["response"]; - }; - uploadSarif: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/code-scanning/sarifs"]["response"]; - }; - }; - codesOfConduct: { - getAllCodesOfConduct: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /codes_of_conduct"]["response"]; - }; - getConductCode: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /codes_of_conduct/{key}"]["response"]; - 
}; - getForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/community/code_of_conduct"]["response"]; - }; - }; - emojis: { - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /emojis"]["response"]; - }; - }; - enterpriseAdmin: { - disableSelectedOrganizationGithubActionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"]["response"]; - }; - enableSelectedOrganizationGithubActionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"]["response"]; - }; - getAllowedActionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /enterprises/{enterprise}/actions/permissions/selected-actions"]["response"]; - }; - getGithubActionsPermissionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /enterprises/{enterprise}/actions/permissions"]["response"]; - }; - listSelectedOrganizationsEnabledGithubActionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /enterprises/{enterprise}/actions/permissions/organizations"]["response"]; - }; - setAllowedActionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"]["response"]; - }; - setGithubActionsPermissionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions"]["response"]; - }; - setSelectedOrganizationsEnabledGithubActionsEnterprise: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /enterprises/{enterprise}/actions/permissions/organizations"]["response"]; - }; - }; - gists: { - checkIsStarred: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/{gist_id}/star"]["response"]; - }; - create: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /gists"]["response"]; - }; - createComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /gists/{gist_id}/comments"]["response"]; - }; - delete: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /gists/{gist_id}"]["response"]; - }; - deleteComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /gists/{gist_id}/comments/{comment_id}"]["response"]; - }; - fork: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /gists/{gist_id}/forks"]["response"]; - }; - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/{gist_id}"]["response"]; - }; - getComment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/{gist_id}/comments/{comment_id}"]["response"]; - }; - getRevision: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/{gist_id}/{sha}"]["response"]; - }; - list: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists"]["response"]; - }; - listComments: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/{gist_id}/comments"]["response"]; - }; - listCommits: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/{gist_id}/commits"]["response"]; - }; - listForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/gists"]["response"]; - }; - listForks: { - parameters: RequestParameters 
& Omit; - response: Endpoints["GET /gists/{gist_id}/forks"]["response"]; - }; - listPublic: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/public"]["response"]; - }; - listStarred: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gists/starred"]["response"]; - }; - star: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /gists/{gist_id}/star"]["response"]; - }; - unstar: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /gists/{gist_id}/star"]["response"]; - }; - update: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /gists/{gist_id}"]["response"]; - }; - updateComment: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /gists/{gist_id}/comments/{comment_id}"]["response"]; - }; - }; - git: { - createBlob: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/git/blobs"]["response"]; - }; - createCommit: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/git/commits"]["response"]; - }; - createRef: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/git/refs"]["response"]; - }; - createTag: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/git/tags"]["response"]; - }; - createTree: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/git/trees"]["response"]; - }; - deleteRef: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/git/refs/{ref}"]["response"]; - }; - getBlob: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"]["response"]; - }; - getCommit: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"]["response"]; - }; - getRef: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/git/ref/{ref}"]["response"]; - }; - getTag: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"]["response"]; - }; - getTree: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"]["response"]; - }; - listMatchingRefs: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"]["response"]; - }; - updateRef: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]["response"]; - }; - }; - gitignore: { - getAllTemplates: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gitignore/templates"]["response"]; - }; - getTemplate: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /gitignore/templates/{name}"]["response"]; - }; - }; - interactions: { - getRestrictionsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/interaction-limits"]["response"]; - }; - getRestrictionsForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/interaction-limits"]["response"]; - }; - getRestrictionsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/interaction-limits"]["response"]; - }; - getRestrictionsForYourPublicRepos: { - parameters: RequestParameters & Omit; - response: Endpoints["GET 
/user/interaction-limits"]["response"]; - }; - removeRestrictionsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/interaction-limits"]["response"]; - }; - removeRestrictionsForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/interaction-limits"]["response"]; - }; - removeRestrictionsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/interaction-limits"]["response"]; - }; - removeRestrictionsForYourPublicRepos: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/interaction-limits"]["response"]; - }; - setRestrictionsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /user/interaction-limits"]["response"]; - }; - setRestrictionsForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/interaction-limits"]["response"]; - }; - setRestrictionsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/interaction-limits"]["response"]; - }; - setRestrictionsForYourPublicRepos: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /user/interaction-limits"]["response"]; - }; - }; - issues: { - addAssignees: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"]["response"]; - }; - addLabels: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; - }; - checkUserCanBeAssigned: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/assignees/{assignee}"]["response"]; - }; - create: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/issues"]["response"]; - }; - createComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; - }; - createLabel: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/labels"]["response"]; - }; - createMilestone: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/milestones"]["response"]; - }; - deleteComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; - }; - deleteLabel: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/labels/{name}"]["response"]; - }; - deleteMilestone: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; - }; - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}"]["response"]; - }; - getComment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; - }; - getEvent: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/events/{event_id}"]["response"]; - }; - getLabel: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/labels/{name}"]["response"]; - }; - getMilestone: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; - }; - list: { - 
parameters: RequestParameters & Omit; - response: Endpoints["GET /issues"]["response"]; - }; - listAssignees: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/assignees"]["response"]; - }; - listComments: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; - }; - listCommentsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["response"]; - }; - listEvents: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/events"]["response"]; - }; - listEventsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/events"]["response"]; - }; - listEventsForTimeline: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"]["response"]; - }; - listForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/issues"]["response"]; - }; - listForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/issues"]["response"]; - }; - listForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues"]["response"]; - }; - listLabelsForMilestone: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"]["response"]; - }; - listLabelsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/labels"]["response"]; - }; - listLabelsOnIssue: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; - }; - listMilestones: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/milestones"]["response"]; - }; - lock: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"]["response"]; - }; - removeAllLabels: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; - }; - removeAssignees: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"]["response"]; - }; - removeLabel: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"]["response"]; - }; - setLabels: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; - }; - unlock: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"]["response"]; - }; - update: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/issues/{issue_number}"]["response"]; - }; - updateComment: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; - }; - updateLabel: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/labels/{name}"]["response"]; - }; - updateMilestone: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; - }; 
- }; - licenses: { - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /licenses/{license}"]["response"]; - }; - getAllCommonlyUsed: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /licenses"]["response"]; - }; - getForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/license"]["response"]; - }; - }; - markdown: { - render: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /markdown"]["response"]; - }; - renderRaw: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /markdown/raw"]["response"]; - }; - }; - meta: { - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /meta"]["response"]; - }; - getOctocat: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /octocat"]["response"]; - }; - getZen: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /zen"]["response"]; - }; - root: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /"]["response"]; - }; - }; - migrations: { - cancelImport: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/import"]["response"]; - }; - deleteArchiveForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/migrations/{migration_id}/archive"]["response"]; - }; - deleteArchiveForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/migrations/{migration_id}/archive"]["response"]; - }; - downloadArchiveForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/archive"]["response"]; - }; - getArchiveForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/migrations/{migration_id}/archive"]["response"]; - }; - getCommitAuthors: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/import/authors"]["response"]; - }; - getImportStatus: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/import"]["response"]; - }; - getLargeFiles: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/import/large_files"]["response"]; - }; - getStatusForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/migrations/{migration_id}"]["response"]; - }; - getStatusForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/migrations/{migration_id}"]["response"]; - }; - listForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/migrations"]["response"]; - }; - listForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/migrations"]["response"]; - }; - listReposForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/repositories"]["response"]; - }; - listReposForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/migrations/{migration_id}/repositories"]["response"]; - }; - mapCommitAuthor: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"]["response"]; - }; - setLfsPreference: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/import/lfs"]["response"]; - }; - startForAuthenticatedUser: { - parameters: RequestParameters & Omit; 
- response: Endpoints["POST /user/migrations"]["response"]; - }; - startForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/migrations"]["response"]; - }; - startImport: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/import"]["response"]; - }; - unlockRepoForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"]["response"]; - }; - unlockRepoForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"]["response"]; - }; - updateImport: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/import"]["response"]; - }; - }; - orgs: { - blockUser: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/blocks/{username}"]["response"]; - }; - cancelInvitation: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/invitations/{invitation_id}"]["response"]; - }; - checkBlockedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/blocks/{username}"]["response"]; - }; - checkMembershipForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/members/{username}"]["response"]; - }; - checkPublicMembershipForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/public_members/{username}"]["response"]; - }; - convertMemberToOutsideCollaborator: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/outside_collaborators/{username}"]["response"]; - }; - createInvitation: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/invitations"]["response"]; - }; - createWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/hooks"]["response"]; - }; - deleteWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/hooks/{hook_id}"]["response"]; - }; - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}"]["response"]; - }; - getMembershipForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/memberships/orgs/{org}"]["response"]; - }; - getMembershipForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/memberships/{username}"]["response"]; - }; - getWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/hooks/{hook_id}"]["response"]; - }; - getWebhookConfigForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/config"]["response"]; - }; - list: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /organizations"]["response"]; - }; - listAppInstallations: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/installations"]["response"]; - }; - listBlockedUsers: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/blocks"]["response"]; - }; - listFailedInvitations: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/failed_invitations"]["response"]; - }; - listForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/orgs"]["response"]; - }; - listForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET 
/users/{username}/orgs"]["response"]; - }; - listInvitationTeams: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["response"]; - }; - listMembers: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/members"]["response"]; - }; - listMembershipsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/memberships/orgs"]["response"]; - }; - listOutsideCollaborators: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/outside_collaborators"]["response"]; - }; - listPendingInvitations: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/invitations"]["response"]; - }; - listPublicMembers: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/public_members"]["response"]; - }; - listWebhooks: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/hooks"]["response"]; - }; - pingWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/hooks/{hook_id}/pings"]["response"]; - }; - removeMember: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/members/{username}"]["response"]; - }; - removeMembershipForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/memberships/{username}"]["response"]; - }; - removeOutsideCollaborator: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/outside_collaborators/{username}"]["response"]; - }; - removePublicMembershipForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/public_members/{username}"]["response"]; - }; - setMembershipForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/memberships/{username}"]["response"]; - }; - setPublicMembershipForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/public_members/{username}"]["response"]; - }; - unblockUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/blocks/{username}"]["response"]; - }; - update: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /orgs/{org}"]["response"]; - }; - updateMembershipForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /user/memberships/orgs/{org}"]["response"]; - }; - updateWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /orgs/{org}/hooks/{hook_id}"]["response"]; - }; - updateWebhookConfigForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /orgs/{org}/hooks/{hook_id}/config"]["response"]; - }; - }; - packages: { - deletePackageForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/packages/{package_type}/{package_name}"]["response"]; - }; - deletePackageForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/packages/{package_type}/{package_name}"]["response"]; - }; - deletePackageVersionForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; - }; - deletePackageVersionForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE 
/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; - }; - getAllPackageVersionsForAPackageOwnedByAnOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; - }; - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; - }; - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; - }; - getAllPackageVersionsForPackageOwnedByOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; - }; - getAllPackageVersionsForPackageOwnedByUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}/versions"]["response"]; - }; - getPackageForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/packages/{package_type}/{package_name}"]["response"]; - }; - getPackageForOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}"]["response"]; - }; - getPackageForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}"]["response"]; - }; - getPackageVersionForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; - }; - getPackageVersionForOrganization: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; - }; - getPackageVersionForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; - }; - restorePackageForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /user/packages/{package_type}/{package_name}/restore{?token}"]["response"]; - }; - restorePackageForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"]["response"]; - }; - restorePackageVersionForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; - }; - restorePackageVersionForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; - }; - }; - projects: { - addCollaborator: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /projects/{project_id}/collaborators/{username}"]["response"]; - }; - createCard: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /projects/columns/{column_id}/cards"]["response"]; - }; - createColumn: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /projects/{project_id}/columns"]["response"]; - }; - createForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: 
Endpoints["POST /user/projects"]["response"]; - }; - createForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/projects"]["response"]; - }; - createForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/projects"]["response"]; - }; - delete: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /projects/{project_id}"]["response"]; - }; - deleteCard: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /projects/columns/cards/{card_id}"]["response"]; - }; - deleteColumn: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /projects/columns/{column_id}"]["response"]; - }; - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /projects/{project_id}"]["response"]; - }; - getCard: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /projects/columns/cards/{card_id}"]["response"]; - }; - getColumn: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /projects/columns/{column_id}"]["response"]; - }; - getPermissionForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /projects/{project_id}/collaborators/{username}/permission"]["response"]; - }; - listCards: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /projects/columns/{column_id}/cards"]["response"]; - }; - listCollaborators: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /projects/{project_id}/collaborators"]["response"]; - }; - listColumns: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /projects/{project_id}/columns"]["response"]; - }; - listForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/projects"]["response"]; - }; - listForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/projects"]["response"]; - }; - listForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/projects"]["response"]; - }; - moveCard: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /projects/columns/cards/{card_id}/moves"]["response"]; - }; - moveColumn: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /projects/columns/{column_id}/moves"]["response"]; - }; - removeCollaborator: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /projects/{project_id}/collaborators/{username}"]["response"]; - }; - update: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /projects/{project_id}"]["response"]; - }; - updateCard: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /projects/columns/cards/{card_id}"]["response"]; - }; - updateColumn: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /projects/columns/{column_id}"]["response"]; - }; - }; - pulls: { - checkIfMerged: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"]["response"]; - }; - create: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pulls"]["response"]; - }; - createReplyForReviewComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"]["response"]; - }; - createReview: { - parameters: RequestParameters & Omit; - response: Endpoints["POST 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; - }; - createReviewComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; - }; - deletePendingReview: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; - }; - deleteReviewComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; - }; - dismissReview: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"]["response"]; - }; - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}"]["response"]; - }; - getReview: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; - }; - getReviewComment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; - }; - list: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls"]["response"]; - }; - listCommentsForReview: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"]["response"]; - }; - listCommits: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["response"]; - }; - listFiles: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["response"]; - }; - listRequestedReviewers: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; - }; - listReviewComments: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; - }; - listReviewCommentsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["response"]; - }; - listReviews: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; - }; - merge: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"]["response"]; - }; - removeRequestedReviewers: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; - }; - requestReviewers: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; - }; - submitReview: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"]["response"]; - }; - update: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"]["response"]; - }; - updateBranch: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"]["response"]; - }; - updateReview: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; - }; - updateReviewComment: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; - }; - }; - rateLimit: { - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /rate_limit"]["response"]; - }; - }; - reactions: { - createForCommitComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; - }; - createForIssue: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; - }; - createForIssueComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; - }; - createForPullRequestReviewComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; - }; - createForTeamDiscussionCommentInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; - }; - createForTeamDiscussionInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; - }; - deleteForCommitComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"]["response"]; - }; - deleteForIssue: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"]["response"]; - }; - deleteForIssueComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"]["response"]; - }; - deleteForPullRequestComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"]["response"]; - }; - deleteForTeamDiscussion: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"]["response"]; - }; - deleteForTeamDiscussionComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"]["response"]; - }; - deleteLegacy: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /reactions/{reaction_id}"]["response"]; - }; - listForCommitComment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; - }; - listForIssue: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; - }; - listForIssueComment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; - }; - listForPullRequestReviewComment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; - }; - 
listForTeamDiscussionCommentInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; - }; - listForTeamDiscussionInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; - }; - }; - repos: { - acceptInvitation: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /user/repository_invitations/{invitation_id}"]["response"]; - }; - addAppAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; - }; - addCollaborator: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/collaborators/{username}"]["response"]; - }; - addStatusCheckContexts: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; - }; - addTeamAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; - }; - addUserAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; - }; - checkCollaborator: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/collaborators/{username}"]["response"]; - }; - checkVulnerabilityAlerts: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; - }; - compareCommits: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/compare/{base}...{head}"]["response"]; - }; - createCommitComment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; - }; - createCommitSignatureProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; - }; - createCommitStatus: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/statuses/{sha}"]["response"]; - }; - createDeployKey: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/keys"]["response"]; - }; - createDeployment: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/deployments"]["response"]; - }; - createDeploymentStatus: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; - }; - createDispatchEvent: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/dispatches"]["response"]; - }; - createForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /user/repos"]["response"]; - }; - createFork: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/forks"]["response"]; - }; - createInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/repos"]["response"]; - }; - createOrUpdateEnvironment: { - parameters: RequestParameters & Omit; - response: 
Endpoints["PUT /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; - }; - createOrUpdateFileContents: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/contents/{path}"]["response"]; - }; - createPagesSite: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pages"]["response"]; - }; - createRelease: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/releases"]["response"]; - }; - createUsingTemplate: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{template_owner}/{template_repo}/generate"]["response"]; - }; - createWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/hooks"]["response"]; - }; - declineInvitation: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/repository_invitations/{invitation_id}"]["response"]; - }; - delete: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}"]["response"]; - }; - deleteAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"]["response"]; - }; - deleteAdminBranchProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; - }; - deleteAnEnvironment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; - }; - deleteBranchProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; - }; - deleteCommitComment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; - }; - deleteCommitSignatureProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; - }; - deleteDeployKey: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/keys/{key_id}"]["response"]; - }; - deleteDeployment: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"]["response"]; - }; - deleteFile: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/contents/{path}"]["response"]; - }; - deleteInvitation: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"]["response"]; - }; - deletePagesSite: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/pages"]["response"]; - }; - deletePullRequestReviewProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; - }; - deleteRelease: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/releases/{release_id}"]["response"]; - }; - deleteReleaseAsset: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; - }; - deleteWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE 
/repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; - }; - disableAutomatedSecurityFixes: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/automated-security-fixes"]["response"]; - }; - disableVulnerabilityAlerts: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; - }; - downloadArchive: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/zipball/{ref}"]["response"]; - }; - downloadTarballArchive: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/tarball/{ref}"]["response"]; - }; - downloadZipballArchive: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/zipball/{ref}"]["response"]; - }; - enableAutomatedSecurityFixes: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/automated-security-fixes"]["response"]; - }; - enableVulnerabilityAlerts: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; - }; - get: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}"]["response"]; - }; - getAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"]["response"]; - }; - getAdminBranchProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; - }; - getAllEnvironments: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/environments"]["response"]; - }; - getAllStatusCheckContexts: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; - }; - getAllTopics: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/topics"]["response"]; - }; - getAppsWithAccessToProtectedBranch: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; - }; - getBranch: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}"]["response"]; - }; - getBranchProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; - }; - getClones: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/traffic/clones"]["response"]; - }; - getCodeFrequencyStats: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/stats/code_frequency"]["response"]; - }; - getCollaboratorPermissionLevel: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/collaborators/{username}/permission"]["response"]; - }; - getCombinedStatusForRef: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["response"]; - }; - getCommit: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}"]["response"]; - }; - getCommitActivityStats: { - parameters: RequestParameters & Omit; - response: Endpoints["GET 
/repos/{owner}/{repo}/stats/commit_activity"]["response"]; - }; - getCommitComment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; - }; - getCommitSignatureProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; - }; - getCommunityProfileMetrics: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/community/profile"]["response"]; - }; - getContent: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/contents/{path}"]["response"]; - }; - getContributorsStats: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/stats/contributors"]["response"]; - }; - getDeployKey: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/keys/{key_id}"]["response"]; - }; - getDeployment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}"]["response"]; - }; - getDeploymentStatus: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"]["response"]; - }; - getEnvironment: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; - }; - getLatestPagesBuild: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pages/builds/latest"]["response"]; - }; - getLatestRelease: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/releases/latest"]["response"]; - }; - getPages: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pages"]["response"]; - }; - getPagesBuild: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pages/builds/{build_id}"]["response"]; - }; - getParticipationStats: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/stats/participation"]["response"]; - }; - getPullRequestReviewProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; - }; - getPunchCardStats: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/stats/punch_card"]["response"]; - }; - getReadme: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/readme"]["response"]; - }; - getReadmeInDirectory: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/readme/{dir}"]["response"]; - }; - getRelease: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}"]["response"]; - }; - getReleaseAsset: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; - }; - getReleaseByTag: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/releases/tags/{tag}"]["response"]; - }; - getStatusChecksProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; - }; - 
getTeamsWithAccessToProtectedBranch: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; - }; - getTopPaths: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/traffic/popular/paths"]["response"]; - }; - getTopReferrers: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/traffic/popular/referrers"]["response"]; - }; - getUsersWithAccessToProtectedBranch: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; - }; - getViews: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/traffic/views"]["response"]; - }; - getWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; - }; - getWebhookConfigForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"]["response"]; - }; - listBranches: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/branches"]["response"]; - }; - listBranchesForHeadCommit: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"]["response"]; - }; - listCollaborators: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/collaborators"]["response"]; - }; - listCommentsForCommit: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; - }; - listCommitCommentsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/comments"]["response"]; - }; - listCommitStatusesForRef: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["response"]; - }; - listCommits: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits"]["response"]; - }; - listContributors: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/contributors"]["response"]; - }; - listDeployKeys: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/keys"]["response"]; - }; - listDeploymentStatuses: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; - }; - listDeployments: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/deployments"]["response"]; - }; - listForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/repos"]["response"]; - }; - listForOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/repos"]["response"]; - }; - listForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/repos"]["response"]; - }; - listForks: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/forks"]["response"]; - }; - listInvitations: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/invitations"]["response"]; - }; - listInvitationsForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: 
Endpoints["GET /user/repository_invitations"]["response"]; - }; - listLanguages: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/languages"]["response"]; - }; - listPagesBuilds: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/pages/builds"]["response"]; - }; - listPublic: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repositories"]["response"]; - }; - listPullRequestsAssociatedWithCommit: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"]["response"]; - }; - listReleaseAssets: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/assets"]["response"]; - }; - listReleases: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/releases"]["response"]; - }; - listTags: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/tags"]["response"]; - }; - listTeams: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/teams"]["response"]; - }; - listWebhooks: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/hooks"]["response"]; - }; - merge: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/merges"]["response"]; - }; - pingWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"]["response"]; - }; - removeAppAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; - }; - removeCollaborator: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/collaborators/{username}"]["response"]; - }; - removeStatusCheckContexts: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; - }; - removeStatusCheckProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; - }; - removeTeamAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; - }; - removeUserAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; - }; - renameBranch: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/rename"]["response"]; - }; - replaceAllTopics: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/topics"]["response"]; - }; - requestPagesBuild: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/pages/builds"]["response"]; - }; - setAdminBranchProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; - }; - setAppAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; - }; 
- setStatusCheckContexts: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; - }; - setTeamAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; - }; - setUserAccessRestrictions: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; - }; - testPushWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"]["response"]; - }; - transfer: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /repos/{owner}/{repo}/transfer"]["response"]; - }; - update: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}"]["response"]; - }; - updateBranchProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; - }; - updateCommitComment: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; - }; - updateInformationAboutPagesSite: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /repos/{owner}/{repo}/pages"]["response"]; - }; - updateInvitation: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"]["response"]; - }; - updatePullRequestReviewProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; - }; - updateRelease: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/releases/{release_id}"]["response"]; - }; - updateReleaseAsset: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; - }; - updateStatusCheckPotection: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; - }; - updateStatusCheckProtection: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; - }; - updateWebhook: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; - }; - updateWebhookConfigForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"]["response"]; - }; - uploadReleaseAsset: { - parameters: RequestParameters & Omit; - response: Endpoints["POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}"]["response"]; - }; - }; - search: { - code: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /search/code"]["response"]; - }; - commits: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /search/commits"]["response"]; - }; - issuesAndPullRequests: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /search/issues"]["response"]; - }; - labels: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /search/labels"]["response"]; - }; - repos: { - parameters: 
RequestParameters & Omit; - response: Endpoints["GET /search/repositories"]["response"]; - }; - topics: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /search/topics"]["response"]; - }; - users: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /search/users"]["response"]; - }; - }; - secretScanning: { - getAlert: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]["response"]; - }; - listAlertsForRepo: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts"]["response"]; - }; - updateAlert: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]["response"]; - }; - }; - teams: { - addOrUpdateMembershipForUserInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; - }; - addOrUpdateProjectPermissionsInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; - }; - addOrUpdateRepoPermissionsInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; - }; - checkPermissionsForProjectInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; - }; - checkPermissionsForRepoInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; - }; - create: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/teams"]["response"]; - }; - createDiscussionCommentInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; - }; - createDiscussionInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions"]["response"]; - }; - deleteDiscussionCommentInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; - }; - deleteDiscussionInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; - }; - deleteInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}"]["response"]; - }; - getByName: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}"]["response"]; - }; - getDiscussionCommentInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; - }; - getDiscussionInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; - }; - getMembershipForUserInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; - }; - list: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams"]["response"]; - }; - listChildInOrg: { - parameters: 
RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/teams"]["response"]; - }; - listDiscussionCommentsInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; - }; - listDiscussionsInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions"]["response"]; - }; - listForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/teams"]["response"]; - }; - listMembersInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["response"]; - }; - listPendingInvitationsInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/invitations"]["response"]; - }; - listProjectsInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["response"]; - }; - listReposInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["response"]; - }; - removeMembershipForUserInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; - }; - removeProjectInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; - }; - removeRepoInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; - }; - updateDiscussionCommentInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; - }; - updateDiscussionInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; - }; - updateInOrg: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}"]["response"]; - }; - }; - users: { - addEmailForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /user/emails"]["response"]; - }; - block: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /user/blocks/{username}"]["response"]; - }; - checkBlocked: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/blocks/{username}"]["response"]; - }; - checkFollowingForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/following/{target_user}"]["response"]; - }; - checkPersonIsFollowedByAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/following/{username}"]["response"]; - }; - createGpgKeyForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /user/gpg_keys"]["response"]; - }; - createPublicSshKeyForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["POST /user/keys"]["response"]; - }; - deleteEmailForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/emails"]["response"]; - }; - deleteGpgKeyForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/gpg_keys/{gpg_key_id}"]["response"]; - }; - deletePublicSshKeyForAuthenticated: { - parameters: 
RequestParameters & Omit; - response: Endpoints["DELETE /user/keys/{key_id}"]["response"]; - }; - follow: { - parameters: RequestParameters & Omit; - response: Endpoints["PUT /user/following/{username}"]["response"]; - }; - getAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user"]["response"]; - }; - getByUsername: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}"]["response"]; - }; - getContextForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/hovercard"]["response"]; - }; - getGpgKeyForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/gpg_keys/{gpg_key_id}"]["response"]; - }; - getPublicSshKeyForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/keys/{key_id}"]["response"]; - }; - list: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users"]["response"]; - }; - listBlockedByAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/blocks"]["response"]; - }; - listEmailsForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/emails"]["response"]; - }; - listFollowedByAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/following"]["response"]; - }; - listFollowersForAuthenticatedUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/followers"]["response"]; - }; - listFollowersForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/followers"]["response"]; - }; - listFollowingForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/following"]["response"]; - }; - listGpgKeysForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/gpg_keys"]["response"]; - }; - listGpgKeysForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/gpg_keys"]["response"]; - }; - listPublicEmailsForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/public_emails"]["response"]; - }; - listPublicKeysForUser: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /users/{username}/keys"]["response"]; - }; - listPublicSshKeysForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["GET /user/keys"]["response"]; - }; - setPrimaryEmailVisibilityForAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /user/email/visibility"]["response"]; - }; - unblock: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/blocks/{username}"]["response"]; - }; - unfollow: { - parameters: RequestParameters & Omit; - response: Endpoints["DELETE /user/following/{username}"]["response"]; - }; - updateAuthenticated: { - parameters: RequestParameters & Omit; - response: Endpoints["PATCH /user"]["response"]; - }; - }; -}; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts deleted file mode 100644 index beef3769..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { Octokit } from "@octokit/core"; -export { RestEndpointMethodTypes } from 
"./generated/parameters-and-response-types"; -import { Api } from "./types"; -export declare function restEndpointMethods(octokit: Octokit): Api; -export declare namespace restEndpointMethods { - var VERSION: string; -} diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts deleted file mode 100644 index 5a0caa5a..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { Route, RequestParameters } from "@octokit/types"; -import { RestEndpointMethods } from "./generated/method-types"; -export declare type Api = RestEndpointMethods & { - rest: RestEndpointMethods; -}; -export declare type EndpointDecorations = { - mapToData?: string; - deprecated?: string; - renamed?: [string, string]; - renamedParameters?: { - [name: string]: string; - }; -}; -export declare type EndpointsDefaultsAndDecorations = { - [scope: string]: { - [methodName: string]: [Route, RequestParameters?, EndpointDecorations?]; - }; -}; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts deleted file mode 100644 index 3608bee8..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const VERSION = "4.15.1"; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js deleted file mode 100644 index d802be8a..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js +++ /dev/null @@ -1,1479 +0,0 @@ -const Endpoints = { - actions: { - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", - ], - cancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel", - ], - createOrUpdateEnvironmentSecret: [ - "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", - ], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", - ], - createRegistrationTokenForOrg: [ - "POST /orgs/{org}/actions/runners/registration-token", - ], - createRegistrationTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/registration-token", - ], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/remove-token", - ], - createWorkflowDispatch: [ - "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", - ], - deleteArtifact: [ - "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}", - ], - deleteEnvironmentSecret: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", - ], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", - ], - deleteSelfHostedRunnerFromOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}", - ], - 
deleteSelfHostedRunnerFromRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", - ], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: [ - "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs", - ], - disableSelectedRepositoryGithubActionsOrganization: [ - "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}", - ], - disableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", - ], - downloadArtifact: [ - "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", - ], - downloadJobLogsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", - ], - downloadWorkflowRunLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", - ], - enableSelectedRepositoryGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}", - ], - enableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", - ], - getAllowedActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/selected-actions", - ], - getAllowedActionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/selected-actions", - ], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key", - ], - getEnvironmentSecret: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", - ], - getGithubActionsPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions", - ], - getGithubActionsPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions", - ], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", - ], - getRepoPermissions: [ - "GET /repos/{owner}/{repo}/actions/permissions", - {}, - { renamed: ["actions", "getGithubActionsPermissionsRepository"] }, - ], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals", - ], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}", - ], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing", - ], - getWorkflowUsage: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", - ], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets", - ], - listJobsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", - ], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - 
listRunnerApplicationsForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/downloads", - ], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", - ], - listSelectedRepositoriesEnabledGithubActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/repositories", - ], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", - ], - listWorkflowRuns: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", - ], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", - ], - reviewPendingDeploymentsForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", - ], - setAllowedActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/selected-actions", - ], - setAllowedActionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions", - ], - setGithubActionsPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions", - ], - setGithubActionsPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions", - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories", - ], - setSelectedRepositoriesEnabledGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories", - ], - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: [ - "DELETE /notifications/threads/{thread_id}/subscription", - ], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: [ - "GET /notifications/threads/{thread_id}/subscription", - ], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: [ - "GET /users/{username}/events/orgs/{org}", - ], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: [ - "GET /users/{username}/received_events/public", - ], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/notifications", - ], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - 
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: [ - "PUT /notifications/threads/{thread_id}/subscription", - ], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"], - }, - apps: { - addRepoToInstallation: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}", - ], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: [ - "POST /content_references/{content_reference_id}/attachments", - { mediaType: { previews: ["corsair"] } }, - ], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: [ - "POST /app/installations/{installation_id}/access_tokens", - ], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: [ - "GET /marketplace_listing/accounts/{account_id}", - ], - getSubscriptionPlanForAccountStubbed: [ - "GET /marketplace_listing/stubbed/accounts/{account_id}", - ], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: [ - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", - ], - listInstallationReposForAuthenticatedUser: [ - "GET /user/installations/{installation_id}/repositories", - ], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: [ - "GET /user/marketplace_purchases/stubbed", - ], - removeRepoFromInstallation: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}", - ], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: [ - "DELETE /app/installations/{installation_id}/suspended", - ], - updateWebhookConfigForApp: ["PATCH /app/hook/config"], - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: [ - "GET /users/{username}/settings/billing/actions", - ], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: [ - "GET /users/{username}/settings/billing/packages", - ], - getSharedStorageBillingOrg: [ - "GET /orgs/{org}/settings/billing/shared-storage", - ], - getSharedStorageBillingUser: [ - "GET /users/{username}/settings/billing/shared-storage", - ], - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET 
/repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: [ - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", - ], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: [ - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", - ], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestSuite: [ - "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", - ], - setSuitesPreferences: [ - "PATCH /repos/{owner}/{repo}/check-suites/preferences", - ], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"], - }, - codeScanning: { - deleteAnalysis: [ - "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}", - ], - getAlert: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - {}, - { renamedParameters: { alert_id: "alert_number" } }, - ], - getAnalysis: [ - "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", - ], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - ], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - ], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"], - }, - codesOfConduct: { - getAllCodesOfConduct: [ - "GET /codes_of_conduct", - { mediaType: { previews: ["scarlet-witch"] } }, - ], - getConductCode: [ - "GET /codes_of_conduct/{key}", - { mediaType: { previews: ["scarlet-witch"] } }, - ], - getForRepo: [ - "GET /repos/{owner}/{repo}/community/code_of_conduct", - { mediaType: { previews: ["scarlet-witch"] } }, - ], - }, - emojis: { get: ["GET /emojis"] }, - enterpriseAdmin: { - disableSelectedOrganizationGithubActionsEnterprise: [ - "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", - ], - enableSelectedOrganizationGithubActionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}", - ], - getAllowedActionsEnterprise: [ - "GET /enterprises/{enterprise}/actions/permissions/selected-actions", - ], - getGithubActionsPermissionsEnterprise: [ - "GET /enterprises/{enterprise}/actions/permissions", - ], - listSelectedOrganizationsEnabledGithubActionsEnterprise: [ - "GET /enterprises/{enterprise}/actions/permissions/organizations", - ], - setAllowedActionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions/selected-actions", - ], - setGithubActionsPermissionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions", - ], - setSelectedOrganizationsEnabledGithubActionsEnterprise: [ - "PUT /enterprises/{enterprise}/actions/permissions/organizations", - ], - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - 
listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"], - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"], - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"], - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: [ - "GET /user/interaction-limits", - {}, - { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }, - ], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: [ - "DELETE /repos/{owner}/{repo}/interaction-limits", - ], - removeRestrictionsForYourPublicRepos: [ - "DELETE /user/interaction-limits", - {}, - { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }, - ], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: [ - "PUT /user/interaction-limits", - {}, - { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }, - ], - }, - issues: { - addAssignees: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees", - ], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/comments", - ], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}", - ], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: [ - "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}", - ], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], 
- listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", - { mediaType: { previews: ["mockingbird"] } }, - ], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: [ - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", - ], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", - ], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", - ], - removeAssignees: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees", - ], - removeLabel: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", - ], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: [ - "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}", - ], - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"], - }, - markdown: { - render: ["POST /markdown"], - renderRaw: [ - "POST /markdown/raw", - { headers: { "content-type": "text/plain; charset=utf-8" } }, - ], - }, - meta: { - get: ["GET /meta"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"], - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - deleteArchiveForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - downloadArchiveForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - getArchiveForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/archive", - { mediaType: { previews: ["wyandotte"] } }, - ], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}", - { mediaType: { previews: ["wyandotte"] } }, - ], - getStatusForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}", - { mediaType: { previews: ["wyandotte"] } }, - ], - listForAuthenticatedUser: [ - "GET /user/migrations", - { mediaType: { previews: ["wyandotte"] } }, - ], - listForOrg: [ - "GET /orgs/{org}/migrations", - { mediaType: { previews: ["wyandotte"] } }, - ], - listReposForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/repositories", - { mediaType: { previews: ["wyandotte"] } }, - ], - listReposForUser: 
[ - "GET /user/migrations/{migration_id}/repositories", - { mediaType: { previews: ["wyandotte"] } }, - ], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", - { mediaType: { previews: ["wyandotte"] } }, - ], - unlockRepoForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", - { mediaType: { previews: ["wyandotte"] } }, - ], - updateImport: ["PATCH /repos/{owner}/{repo}/import"], - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: [ - "PUT /orgs/{org}/outside_collaborators/{username}", - ], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: [ - "DELETE /orgs/{org}/outside_collaborators/{username}", - ], - removePublicMembershipForAuthenticatedUser: [ - "DELETE /orgs/{org}/public_members/{username}", - ], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: [ - "PUT /orgs/{org}/public_members/{username}", - ], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: [ - "PATCH /user/memberships/orgs/{org}", - ], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"], - }, - packages: { - deletePackageForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}", - ], - deletePackageForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}", - ], - deletePackageVersionForAuthenticatedUser: [ - "DELETE 
/user/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - deletePackageVersionForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - getAllPackageVersionsForAPackageOwnedByAnOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - {}, - { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }, - ], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - {}, - { - renamed: [ - "packages", - "getAllPackageVersionsForPackageOwnedByAuthenticatedUser", - ], - }, - ], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - ], - getAllPackageVersionsForPackageOwnedByOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - ], - getAllPackageVersionsForPackageOwnedByUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions", - ], - getPackageForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}", - ], - getPackageForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}", - ], - getPackageForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}", - ], - getPackageVersionForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - getPackageVersionForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - getPackageVersionForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", - ], - restorePackageForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/restore{?token}", - ], - restorePackageForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}", - ], - restorePackageVersionForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", - ], - restorePackageVersionForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", - ], - }, - projects: { - addCollaborator: [ - "PUT /projects/{project_id}/collaborators/{username}", - { mediaType: { previews: ["inertia"] } }, - ], - createCard: [ - "POST /projects/columns/{column_id}/cards", - { mediaType: { previews: ["inertia"] } }, - ], - createColumn: [ - "POST /projects/{project_id}/columns", - { mediaType: { previews: ["inertia"] } }, - ], - createForAuthenticatedUser: [ - "POST /user/projects", - { mediaType: { previews: ["inertia"] } }, - ], - createForOrg: [ - "POST /orgs/{org}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - createForRepo: [ - "POST /repos/{owner}/{repo}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - delete: [ - "DELETE /projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - deleteCard: [ - "DELETE /projects/columns/cards/{card_id}", - { mediaType: { previews: ["inertia"] } }, - ], - deleteColumn: [ - "DELETE /projects/columns/{column_id}", - { mediaType: { previews: ["inertia"] } }, - ], - get: [ - "GET /projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - getCard: [ - "GET /projects/columns/cards/{card_id}", - { mediaType: { previews: ["inertia"] } }, - ], - getColumn: [ - "GET /projects/columns/{column_id}", - { mediaType: { previews: ["inertia"] } }, - ], - getPermissionForUser: [ - "GET 
/projects/{project_id}/collaborators/{username}/permission", - { mediaType: { previews: ["inertia"] } }, - ], - listCards: [ - "GET /projects/columns/{column_id}/cards", - { mediaType: { previews: ["inertia"] } }, - ], - listCollaborators: [ - "GET /projects/{project_id}/collaborators", - { mediaType: { previews: ["inertia"] } }, - ], - listColumns: [ - "GET /projects/{project_id}/columns", - { mediaType: { previews: ["inertia"] } }, - ], - listForOrg: [ - "GET /orgs/{org}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - listForRepo: [ - "GET /repos/{owner}/{repo}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - listForUser: [ - "GET /users/{username}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - moveCard: [ - "POST /projects/columns/cards/{card_id}/moves", - { mediaType: { previews: ["inertia"] } }, - ], - moveColumn: [ - "POST /projects/columns/{column_id}/moves", - { mediaType: { previews: ["inertia"] } }, - ], - removeCollaborator: [ - "DELETE /projects/{project_id}/collaborators/{username}", - { mediaType: { previews: ["inertia"] } }, - ], - update: [ - "PATCH /projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - updateCard: [ - "PATCH /projects/columns/cards/{card_id}", - { mediaType: { previews: ["inertia"] } }, - ], - updateColumn: [ - "PATCH /projects/columns/{column_id}", - { mediaType: { previews: ["inertia"] } }, - ], - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", - ], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", - ], - deletePendingReview: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", - ], - deleteReviewComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", - ], - dismissReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", - ], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", - ], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", - ], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", - ], - listReviewComments: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", - ], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", - ], - requestReviewers: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", - ], - submitReview: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", - ], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", - { mediaType: { 
previews: ["lydian"] } }, - ], - updateReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", - ], - updateReviewComment: [ - "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", - ], - }, - rateLimit: { get: ["GET /rate_limit"] }, - reactions: { - createForCommitComment: [ - "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForIssue: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForIssueComment: [ - "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForPullRequestReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForTeamDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - createForTeamDiscussionInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForCommitComment: [ - "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForIssue: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForIssueComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForPullRequestComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForTeamDiscussion: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteForTeamDiscussionComment: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - deleteLegacy: [ - "DELETE /reactions/{reaction_id}", - { mediaType: { previews: ["squirrel-girl"] } }, - { - deprecated: "octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy", - }, - ], - listForCommitComment: [ - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForIssueComment: [ - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForPullRequestReviewComment: [ - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForTeamDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - listForTeamDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", - { mediaType: { previews: ["squirrel-girl"] } }, - ], - }, - 
repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" }, - ], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" }, - ], - addTeamAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" }, - ], - addUserAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" }, - ], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: [ - "GET /repos/{owner}/{repo}/vulnerability-alerts", - { mediaType: { previews: ["dorian"] } }, - ], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: [ - "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments", - ], - createCommitSignatureProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", - { mediaType: { previews: ["zzzax"] } }, - ], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: [ - "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", - ], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateEnvironment: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}", - ], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: [ - "POST /repos/{owner}/{repo}/pages", - { mediaType: { previews: ["switcheroo"] } }, - ], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createUsingTemplate: [ - "POST /repos/{template_owner}/{template_repo}/generate", - { mediaType: { previews: ["baptiste"] } }, - ], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", - ], - deleteAdminBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", - ], - deleteAnEnvironment: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}", - ], - deleteBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection", - ], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", - { mediaType: { previews: ["zzzax"] } }, - ], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: [ - "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}", - ], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: [ - "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}", - ], - deletePagesSite: [ - "DELETE /repos/{owner}/{repo}/pages", - { mediaType: { previews: ["switcheroo"] } }, - ], - 
deletePullRequestReviewProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", - ], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: [ - "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}", - ], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: [ - "DELETE /repos/{owner}/{repo}/automated-security-fixes", - { mediaType: { previews: ["london"] } }, - ], - disableVulnerabilityAlerts: [ - "DELETE /repos/{owner}/{repo}/vulnerability-alerts", - { mediaType: { previews: ["dorian"] } }, - ], - downloadArchive: [ - "GET /repos/{owner}/{repo}/zipball/{ref}", - {}, - { renamed: ["repos", "downloadZipballArchive"] }, - ], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: [ - "PUT /repos/{owner}/{repo}/automated-security-fixes", - { mediaType: { previews: ["london"] } }, - ], - enableVulnerabilityAlerts: [ - "PUT /repos/{owner}/{repo}/vulnerability-alerts", - { mediaType: { previews: ["dorian"] } }, - ], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", - ], - getAdminBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", - ], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - ], - getAllTopics: [ - "GET /repos/{owner}/{repo}/topics", - { mediaType: { previews: ["mercy"] } }, - ], - getAppsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - ], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection", - ], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: [ - "GET /repos/{owner}/{repo}/collaborators/{username}/permission", - ], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", - { mediaType: { previews: ["zzzax"] } }, - ], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", - ], - getEnvironment: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}", - ], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getParticipationStats: ["GET 
/repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", - ], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - ], - getTeamsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - ], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - ], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/config", - ], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", - { mediaType: { previews: ["groot"] } }, - ], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", - ], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: [ - "GET /repos/{owner}/{repo}/commits/{ref}/statuses", - ], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", - ], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", - { mediaType: { previews: ["groot"] } }, - ], - listReleaseAssets: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/assets", - ], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" }, - ], - removeCollaborator: [ - "DELETE /repos/{owner}/{repo}/collaborators/{username}", - ], - removeStatusCheckContexts: [ - "DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" }, - ], - removeStatusCheckProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - ], - removeTeamAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" }, - ], - removeUserAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" }, - ], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: [ - "PUT /repos/{owner}/{repo}/topics", - { mediaType: { previews: ["mercy"] } }, - ], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", - ], - setAppAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" }, - ], - setStatusCheckContexts: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" }, - ], - setTeamAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" }, - ], - setUserAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" }, - ], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection", - ], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: [ - "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}", - ], - updatePullRequestReviewProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", - ], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: [ - "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}", - ], - updateStatusCheckPotection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - {}, - { renamed: ["repos", "updateStatusCheckProtection"] }, - ], - updateStatusCheckProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - ], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: [ - "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config", - ], - uploadReleaseAsset: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", - { baseUrl: "https://uploads.github.com" }, - ], - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits", { mediaType: { previews: ["cloak"] } }], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics", { mediaType: { previews: ["mercy"] } }], - users: ["GET /search/users"], - }, - secretScanning: { - getAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", - ], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", - ], 
- }, - teams: { - addOrUpdateMembershipForUserInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", - ], - addOrUpdateProjectPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - addOrUpdateRepoPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", - ], - checkPermissionsForProjectInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", - { mediaType: { previews: ["inertia"] } }, - ], - checkPermissionsForRepoInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", - ], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", - ], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", - ], - deleteDiscussionInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", - ], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", - ], - getDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", - ], - getMembershipForUserInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/memberships/{username}", - ], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", - ], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/invitations", - ], - listProjectsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/projects", - { mediaType: { previews: ["inertia"] } }, - ], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", - ], - removeProjectInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}", - ], - removeRepoInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", - ], - updateDiscussionCommentInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", - ], - updateDiscussionInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", - ], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"], - }, - users: { - addEmailForAuthenticated: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - 
getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"], - }, -}; - -const VERSION = "4.15.1"; - -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ method, url }, defaults); - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - const scopeMethods = newMethods[scope]; - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - return newMethods; -} -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); - // There are currently no other decorations than `.mapToData` - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined, - }); - return requestWithDefaults(options); - } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); - if (!(alias in options)) { - options[alias] = options[name]; - } - delete options[name]; - } - } - return requestWithDefaults(options); - } - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - return requestWithDefaults(...args); - } - return Object.assign(withDecorations, requestWithDefaults); -} - -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit, Endpoints); - return { - ...api, - rest: api, - }; -} -restEndpointMethods.VERSION = VERSION; - -export { restEndpointMethods }; -//# sourceMappingURL=index.js.map diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map deleted file mode 100644 index fea8b2a0..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n 
enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: 
[\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\n \"POST /content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: [\"corsair\"] } },\n ],\n createFromManifest: [\"POST 
/app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n 
setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\n \"GET /codes_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getConductCode: [\n \"GET /codes_of_conduct/{key}\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getForRepo: [\n \"GET /repos/{owner}/{repo}/community/code_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST 
/repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET 
/repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n { mediaType: { previews: [\"mockingbird\"] } },\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getStatusForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForAuthenticatedUser: [\n \"GET /user/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n 
setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForOrg: [\n \"DELETE 
/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\n \"PUT /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createCard: [\n \"POST /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createColumn: [\n \"POST /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForAuthenticatedUser: [\n \"POST /user/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForOrg: [\n \"POST /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForRepo: [\n \"POST /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n delete: [\n \"DELETE /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteCard: [\n \"DELETE /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteColumn: [\n \"DELETE /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n get: [\n \"GET /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getCard: [\n \"GET /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getColumn: [\n \"GET /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getPermissionForUser: [\n \"GET 
/projects/{project_id}/collaborators/{username}/permission\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCards: [\n \"GET /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCollaborators: [\n \"GET /projects/{project_id}/collaborators\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listColumns: [\n \"GET /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForRepo: [\n \"GET /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForUser: [\n \"GET /users/{username}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveCard: [\n \"POST /projects/columns/cards/{card_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveColumn: [\n \"POST /projects/columns/{column_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n update: [\n \"PATCH /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateCard: [\n \"PATCH /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateColumn: [\n \"PATCH /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n 
updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n { mediaType: { previews: [\"lydian\"] } },\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteLegacy: [\n \"DELETE /reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n {\n deprecated: \"octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy\",\n },\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n 
],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\n \"POST /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n { mediaType: { previews: [\"baptiste\"] } },\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n 
deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\n \"DELETE /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\n \"GET /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET 
/repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST 
/repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\n \"PUT /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", { mediaType: { previews: [\"cloak\"] } }],\n issuesAndPullRequests: [\"GET 
/search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", { mediaType: { previews: [\"mercy\"] } }],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: 
[\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"4.15.1\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for 
(const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\n"],"names":["ENDPOINTS"],"mappings":"AAAA,MAAM,SAAS,GAAG;AAClB,IAAI,OAAO,EAAE;AACb,QAAQ,0BAA0B,EAAE;AACpC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,iDAAiD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,YAAY,EAAE,CAAC,+CAA+C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,+CAA+C;AAC3D,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,SAAS,EAAE,uCAAuC,CAAC,EAAE;AAC7E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,aAAa,EAAE,CAAC,yDAAyD,CAAC;AAClF,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,6CAA6C,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,cAAc,EAAE,CAAC,iDAAiD,CAAC;AAC3E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,6CAA6C,CAAC;AAC7E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,iCAAiC,CAAC;AAC3D,QAAQ,eAAe,EAAE,CAAC,2CAA2C,CAAC;AACtE,QAAQ,iBAAiB,EAAE,CAAC,6CAA6C,CAAC;AAC1E,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAq
D;AACjE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,wDAAwD,EAAE;AAClE,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,iCAAiC,CAAC;AACxE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,wCAAwC,CAAC;AAC3E,QAAQ,aAAa,EAAE,CAAC,wDAAwD,CAAC;AACjF,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,kDAAkD;AAC9D,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,qCAAqC,EAAE,CAAC,kCAAkC,CAAC;AACnF,QAAQ,sBAAsB,EAAE,CAAC,2CAA2C,CAAC;AAC7E,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,YAAY,CAAC;AAChC,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,SAAS,EAAE,CAAC,wCAAwC,CAAC;AAC7D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE,CAAC,8BAA8B,CAAC;AACxE,QAAQ,qCAAqC,EAAE,CAAC,oBAAoB,CAAC;AACrE,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,aAAa,CAAC;AACzC,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,QAAQ,uBAAuB,EAAE,CAAC,qCAAqC,CAAC;AACxE,QAAQ,mBAAmB,EAAE,CAAC,wBAAwB,CAAC;AACvD,QAAQ,yBAAyB,EAAE,CAAC,uCAAuC,CAAC;AAC5E,QAAQ,+BAA+B,EAAE;AACzC,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,kCAAkC,CAAC;AAC5D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,mBAAmB,CAAC;AAClE,QAAQ,sBAAsB,EAAE,CAAC,+BAA+B,CAAC;AACjE,QAAQ,sBAAsB,EAAE,CAAC,qCAAqC,CAAC;AACvE,QAAQ,qBAAqB,EAAE,CAAC,sCAAsC,CAAC;AACvE,QAAQ,oCAAoC,EAAE,CAAC,yBAAyB,CAAC;AACzE,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,uBAAuB,EAAE,CAAC,oBAAoB,CAAC;AACvD,QAAQ,2BAA2B,EAAE,CAAC,yCAAyC,CAAC;AAChF,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,4BAA4B,EAAE,CAAC,kCAAkC,CAAC;AAC1E,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,uBAAuB,EAAE;AACjC,YAAY,6DAA6D;AACzE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,kBAAkB,EAAE,CAAC,6CAA6C,CAAC;AAC3E,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,UAAU,CAAC;AACtC,QAAQ,SAAS,EAAE,CAAC,sBAAsB,CAAC;AAC3C,QAAQ,eAAe,EAAE,CAAC,0CAA0C,CAAC;AACrE,QAAQ,kBAAkB,EAAE,CAAC,8BAA8B,CAAC;AAC5D,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oCAAoC,CAAC;AACnE,QAAQ,sBAAsB,EAAE,CAAC,sBAAsB,CAAC;AACxD,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,wBAAwB,CAAC;AACrD,QAAQ,qCAAqC,EAAE,CAAC,yBAAyB,CAAC;AAC1E,QAAQ,SAAS,EAAE,CAAC,gCAAgC,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,iCAAiC,EAAE,CAAC,gCAAgC,CAAC;AAC7E,QAAQ,qCAAqC,EAAE,CAAC,iCAAiC,CAAC;AAClF,QAAQ,4CAA4C,EAAE;AACtD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,uCAAuC,CAAC;AAC7D,QAAQ,6BAA6B,EAAE,CAAC,4BAA4B,CAAC;AACrE,QAAQ,UAAU,EAAE,CAAC,6CAA6C,CAAC;AACnE,QAAQ,mBAAmB,EAAE,CAAC,
oDAAoD,CAAC;AACnF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,wBAAwB,CAAC;AAC7D,KAAK;AACL,IAAI,OAAO,EAAE;AACb,QAAQ,0BAA0B,EAAE,CAAC,0CAA0C,CAAC;AAChF,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,2CAA2C,CAAC;AAClF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,uDAAuD;AACnE,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,MAAM,EAAE,CAAC,uCAAuC,CAAC;AACzD,QAAQ,WAAW,EAAE,CAAC,yCAAyC,CAAC;AAChE,QAAQ,GAAG,EAAE,CAAC,qDAAqD,CAAC;AACpE,QAAQ,QAAQ,EAAE,CAAC,yDAAyD,CAAC;AAC7E,QAAQ,eAAe,EAAE;AACzB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,oDAAoD,CAAC;AAC1E,QAAQ,YAAY,EAAE;AACtB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,cAAc,EAAE;AACxB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,uDAAuD,CAAC;AACzE,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,cAAc,EAAE;AACxB,YAAY,oFAAoF;AAChG,SAAS;AACT,QAAQ,QAAQ,EAAE;AAClB,YAAY,+DAA+D;AAC3E,YAAY,EAAE;AACd,YAAY,EAAE,iBAAiB,EAAE,EAAE,QAAQ,EAAE,cAAc,EAAE,EAAE;AAC/D,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,2DAA2D,CAAC;AAC/E,QAAQ,iBAAiB,EAAE,CAAC,gDAAgD,CAAC;AAC7E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,kDAAkD,CAAC;AAChF,QAAQ,WAAW,EAAE;AACrB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,KAAK;AACL,IAAI,cAAc,EAAE;AACpB,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,uBAAuB;AACnC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,6BAA6B;AACzC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,qDAAqD;AACjE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE,EAAE,GAAG,EAAE,CAAC,aAAa,CAAC,EAAE;AACpC,IAAI,eAAe,EAAE;AACrB,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,sDAAsD,EAAE;AAChE,YAAY,iEAAiE;AAC7E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,cAAc,EAAE,CAAC,2BAA2B,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,aAAa,CAAC;AAC/B,QAAQ,aAAa,EAAE,CAAC,gCAAgC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,yBAAyB,CAAC;AAC3C,QAAQ,aAAa,EAAE,CAAC,+CAA+C,CAAC;AACxE,QAAQ,IAAI,EAAE,CAAC,6BAA6B,CAAC;AAC7C,QAAQ,GAAG,EAAE,CAAC,sBAAsB,CAAC;AACrC,QAAQ,UAAU,EAAE,CAAC,4CAA4C,CAAC;AAClE,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,YAAY,EAAE,CAAC,+BAA+B,CAAC;AACvD,QAAQ,WAAW,EAAE,CAAC,8BAA8B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,4BAA4B,CAAC;AACjD,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,WAAW,EAAE,CAAC,oBAAoB,CAAC;AAC3C,QAAQ,IAAI,EAAE,CAAC,2BAA2B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,KAAK;AACL,IAAI,GAAG,EAAE;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,YAAY,EAAE,CAAC,wCAAwC,CAAC;AAChE,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,SAAS,EAAE,CAAC,oDAAoD,CAAC;AACzE,QAAQ,MAAM,EAAE,CAAC,yCAAyC,CAAC;AAC3D,QAAQ,MAAM,EAAE,CAAC,8CAA8C,CAAC;AAChE,QAAQ,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,gBAAgB,EAAE,CAAC,
mDAAmD,CAAC;AAC/E,QAAQ,SAAS,EAAE,CAAC,4CAA4C,CAAC;AACjE,KAAK;AACL,IAAI,SAAS,EAAE;AACf,QAAQ,eAAe,EAAE,CAAC,0BAA0B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,iCAAiC,CAAC;AACxD,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,QAAQ,sCAAsC,EAAE,CAAC,iCAAiC,CAAC;AACnF,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,iCAAiC;AAC7C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,wCAAwC,CAAC,EAAE;AACnF,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,YAAY,EAAE;AACtB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,yDAAyD,CAAC;AAC9E,QAAQ,sBAAsB,EAAE,CAAC,gDAAgD,CAAC;AAClF,QAAQ,MAAM,EAAE,CAAC,mCAAmC,CAAC;AACrD,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,4CAA4C,CAAC;AACnE,QAAQ,eAAe,EAAE;AACzB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,iDAAiD,CAAC;AAChE,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,QAAQ,EAAE,CAAC,oDAAoD,CAAC;AACxE,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,YAAY,EAAE,CAAC,yDAAyD,CAAC;AACjF,QAAQ,IAAI,EAAE,CAAC,aAAa,CAAC;AAC7B,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,YAAY,EAAE,CAAC,0DAA0D,CAAC;AAClF,QAAQ,mBAAmB,EAAE,CAAC,2CAA2C,CAAC;AAC1E,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,0DAA0D;AACtE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,aAAa,CAAC,EAAE,EAAE;AACxD,SAAS;AACT,QAAQ,wBAAwB,EAAE,CAAC,kBAAkB,CAAC;AACtD,QAAQ,UAAU,EAAE,CAAC,wBAAwB,CAAC;AAC9C,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kCAAkC,CAAC;AAC/D,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,sCAAsC,CAAC;AAChE,QAAQ,IAAI,EAAE,CAAC,sDAAsD,CAAC;AACtE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,wDAAwD,CAAC;AAC7E,QAAQ,MAAM,EAAE,CAAC,yDAAyD,CAAC;AAC3E,QAAQ,MAAM,EAAE,CAAC,mDAAmD,CAAC;AACrE,QAAQ,aAAa,EAAE,CAAC,0DAA0D,CAAC;AACnF,QAAQ,WAAW,EAAE,CAAC,2CAA2C,CAAC;AAClE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,GAAG,EAAE,CAAC,yBAAyB,CAAC;AACxC,QAAQ,kBAAkB,EAAE,CAAC,eAAe,CAAC;AAC7C,QAAQ,UAAU,EAAE,CAAC,mCAAmC,CAAC;AACzD,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,MAAM,EAAE,CAAC,gBAAgB,CAAC;AAClC,QAAQ,SAAS,EAAE;AACnB,YAAY,oBAAoB;AAChC,YAAY,EAAE,OAAO,EAAE,EAAE,cAAc,EAAE,2BAA2B,EAAE,EAAE;AACxE,SAAS;AACT,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC;AAC1B,QAAQ,UAAU,EAAE,CAAC,cAAc,CAAC;AACpC,QAAQ,MAAM,EAAE,CAAC,UAAU,CAAC;AAC5B,QAAQ,IAAI,EAAE,CAAC,OAAO,CAAC;AACvB,KAAK;AACL,IAAI,UAAU,EAAE;AAChB,QAAQ,YAAY,EAAE,CAAC,qCAAqC,CAAC;AAC7D,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sDAAsD;AAClE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mDAAmD;AAC/D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAA
S;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,6CAA6C;AACzD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,eAAe,EAAE,CAAC,kCAAkC,CAAC;AAC7D,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,2CAA2C;AACvD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,sBAAsB;AAClC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,4BAA4B;AACxC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,wDAAwD;AACpE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kDAAkD;AAC9D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,wDAAwD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,yBAAyB,EAAE,CAAC,uBAAuB,CAAC;AAC5D,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,qEAAqE;AACjF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,gBAAgB,EAAE,CAAC,gDAAgD,CAAC;AAC5E,QAAQ,gBAAgB,EAAE,CAAC,mCAAmC,CAAC;AAC/D,QAAQ,sBAAsB,EAAE,CAAC,oCAAoC,CAAC;AACtE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,8BAA8B,CAAC;AAC1D,QAAQ,aAAa,EAAE,CAAC,wBAAwB,CAAC;AACjD,QAAQ,aAAa,EAAE,CAAC,oCAAoC,CAAC;AAC7D,QAAQ,GAAG,EAAE,CAAC,iBAAiB,CAAC;AAChC,QAAQ,iCAAiC,EAAE,CAAC,kCAAkC,CAAC;AAC/E,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iCAAiC,CAAC;AACvD,QAAQ,sBAAsB,EAAE,CAAC,wCAAwC,CAAC;AAC1E,QAAQ,IAAI,EAAE,CAAC,oBAAoB,CAAC;AACpC,QAAQ,oBAAoB,EAAE,CAAC,+BAA+B,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,wBAAwB,CAAC;AACpD,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,wBAAwB,EAAE,CAAC,gBAAgB,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,WAAW,EAAE,CAAC,yBAAyB,CAAC;AAChD,QAAQ,mCAAmC,EAAE,CAAC,4BAA4B,CAAC;AAC3E,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,6BAA6B,CAAC;AAC/D,QAAQ,iBAAiB,EAAE,CAAC,gCAAgC,CAAC;AAC7D,QAAQ,YAAY,EAAE,CAAC,uBAAuB,CAAC;AAC/C,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,YAAY,EAAE,CAAC,uCAAuC,CAAC;AAC/D,QAAQ,uBAAuB,EAAE,CAAC,2CAA2C,CAAC;AAC9E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,uCAAuC,EAAE;AACjD,YAAY,2CAA2C;AACvD,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,MAAM,EAAE,CAAC,mBAAmB,CAAC;AACrC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,oCAAoC;AAChD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mCAAmC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,wCAAwC,EAAE;AAClD,YAAY,mFAAmF;AAC/F,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,4CAA4C,EAAE;AACtD,YAAY,iEAAiE;AAC7E,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,UAAU,EAAE,2CAA2C,CAAC,EAAE;AAClF,SAAS;AACT,QAAQ,2DAA2D,EAAE;AACrE,YAAY,2DAA2D;AACvE,YAAY,EAAE;AACd,YAAY;AACZ,gBAAgB,OAAO,EAAE;AACzB,oBAAoB,UAAU;AAC9B,oBAAoB,yDAAyD;AAC7E,iBAAiB;AACjB,aAAa;AACb,SAAS;AACT,QAAQ,uDAAuD,EAAE;A
ACjE,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,gFAAgF;AAC5F,SAAS;AACT,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+FAA+F;AAC3G,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,eAAe,EAAE;AACzB,YAAY,qDAAqD;AACjE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,qBAAqB;AACjC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2BAA2B;AACvC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,MAAM,EAAE;AAChB,YAAY,+BAA+B;AAC3C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,sCAAsC;AAClD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,GAAG,EAAE;AACb,YAAY,4BAA4B;AACxC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,OAAO,EAAE;AACjB,YAAY,uCAAuC;AACnD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,SAAS,EAAE;AACnB,YAAY,mCAAmC;AAC/C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,gEAAgE;AAC5E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,SAAS,EAAE;AACnB,YAAY,yCAAyC;AACrD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0BAA0B;AACtC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,gCAAgC;AAC5C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,QAAQ,EAAE;AAClB,YAAY,8CAA8C;AAC1D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,MAAM,EAAE;AAChB,YAAY,8BAA8B;AAC1C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,yCAAyC;AACrD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AAC
T,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,aAAa,EAAE,CAAC,qDAAqD,CAAC;AAC9E,QAAQ,MAAM,EAAE,CAAC,kCAAkC,CAAC;AACpD,QAAQ,2BAA2B,EAAE;AACrC,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,wDAAwD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,+CAA+C,CAAC;AAC9D,QAAQ,SAAS,EAAE;AACnB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,uDAAuD,CAAC;AACnF,QAAQ,IAAI,EAAE,CAAC,iCAAiC,CAAC;AACjD,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,SAAS,EAAE,CAAC,qDAAqD,CAAC;AAC1E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,KAAK,EAAE,CAAC,qDAAqD,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,iDAAiD,CAAC;AACnE,QAAQ,YAAY,EAAE;AACtB,YAAY,6DAA6D;AACzE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,KAAK;AACL,IAAI,SAAS,EAAE,EAAE,GAAG,EAAE,CAAC,iBAAiB,CAAC,EAAE;AAC3C,IAAI,SAAS,EAAE;AACf,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4DAA4D;AACxE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4DAA4D;AACxE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mEAAmE;AAC/E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,kEAAkE;AAC9E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,wGAAwG;AACpH,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mFAAmF;AAC/F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,8FAA8F;AAC1G,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,wHAAwH;AACpI,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,iCAAiC;AAC7C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,YAAY;AACZ,gBAAgB,UAAU,EAAE,qIAAqI;AACjK,aAAa;AACb,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,2DAA2D;AACvE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2DAA2D;AACvE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,kEAAkE;AAC9E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,iEAAiE;AAC7E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,uGAAuG;AACnH,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,0
BAA0B,EAAE;AACpC,YAAY,6EAA6E;AACzF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,gBAAgB,EAAE,CAAC,oDAAoD,CAAC;AAChF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,oDAAoD,CAAC;AAC/E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,yFAAyF;AACrG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,mDAAmD,CAAC;AAC7E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,6EAA6E;AACzF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,2CAA2C,CAAC;AACzE,QAAQ,eAAe,EAAE,CAAC,iCAAiC,CAAC;AAC5D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,sBAAsB,EAAE;AAChC,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,UAAU,EAAE,CAAC,kCAAkC,CAAC;AACxD,QAAQ,WAAW,EAAE,CAAC,wBAAwB,CAAC;AAC/C,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,2CAA2C,CAAC;AACjF,QAAQ,eAAe,EAAE;AACzB,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE,EAAE;AACvD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,uDAAuD;AACnE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,UAAU,CAAC,EAAE,EAAE;AACrD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,kCAAkC,CAAC;AAC3D,QAAQ,iBAAiB,EAAE,CAAC,qDAAqD,CAAC;AAClF,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oDAAoD,CAAC;AACnF,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,8CAA8C,CAAC;AACpE,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE,EAAE;AACvD,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,oDAAoD,CAAC;AAC7E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,uDAAuD;AACnE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,mDAAmD;AAC/D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,yCAAyC;AACrD,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,wBAAwB,CAAC,EAAE;AAC5D,SAAS;AACT,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,oDAAoD;AAChE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,2BAA2B,CAAC;AAC1C,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,yBAAyB,EAAE;AACnC,YAAY,wFAAwF;AACpG,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,kCAA
kC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,0CAA0C,CAAC;AAC/D,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAAC;AACjF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,gDAAgD,CAAC;AACnF,QAAQ,SAAS,EAAE,CAAC,yCAAyC,CAAC;AAC9D,QAAQ,sBAAsB,EAAE,CAAC,iDAAiD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,iDAAiD,CAAC;AAC7E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,6CAA6C,CAAC;AACnF,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,8CAA8C,CAAC;AAC9E,QAAQ,YAAY,EAAE,CAAC,yCAAyC,CAAC;AACjE,QAAQ,aAAa,EAAE,CAAC,uDAAuD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,+CAA+C,CAAC;AAC9E,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,QAAQ,EAAE,CAAC,iCAAiC,CAAC;AACrD,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,4CAA4C,CAAC;AACzE,QAAQ,SAAS,EAAE,CAAC,kCAAkC,CAAC;AACvD,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iDAAiD,CAAC;AACvE,QAAQ,eAAe,EAAE,CAAC,sDAAsD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,+CAA+C,CAAC;AAC1E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,QAAQ,eAAe,EAAE,CAAC,qDAAqD,CAAC;AAChF,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,uBAAuB,EAAE;AACjC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE;AACnC,YAAY,oEAAoE;AAChF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,oCAAoC,CAAC;AACzE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,cAAc,EAAE,CAAC,gCAAgC,CAAC;AAC1D,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,UAAU,EAAE,CAAC,uBAAuB,CAAC;AAC7C,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,mCAAmC,EAAE,CAAC,kCAAkC,CAAC;AACjF,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,eAAe,EAAE,CAAC,wCAAwC,CAAC;AACnE,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,sDAAsD;AAClE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,QAAQ,EAAE,CAAC,gCAAgC,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,YAAY,EAAE,CAAC,iCAAiC,CAAC;AACzD,QAAQ,KAAK,EAAE,CAAC,mCAAmC,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,kDAAkD,CAAC;AACzE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,6EAA6E;AACzF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2FAA2F;AACvG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAA
Q,YAAY,EAAE,CAAC,qDAAqD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,0EAA0E;AACtF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wFAAwF;AACpG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,QAAQ,EAAE,CAAC,qCAAqC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,6BAA6B,CAAC;AAC/C,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,+BAA+B,EAAE,CAAC,iCAAiC,CAAC;AAC5E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,wFAAwF;AACpG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iFAAiF;AAC7F,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,EAAE;AACjE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,iFAAiF;AAC7F,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,6CAA6C,CAAC;AACtE,QAAQ,0BAA0B,EAAE;AACpC,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,sEAAsE;AAClF,YAAY,EAAE,OAAO,EAAE,4BAA4B,EAAE;AACrD,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,IAAI,EAAE,CAAC,kBAAkB,CAAC;AAClC,QAAQ,OAAO,EAAE,CAAC,qBAAqB,EAAE,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC;AAChF,QAAQ,qBAAqB,EAAE,CAAC,oBAAoB,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,oBAAoB,CAAC;AACtC,QAAQ,KAAK,EAAE,CAAC,0BAA0B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,oBAAoB,EAAE,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC;AAC9E,QAAQ,KAAK,EAAE,CAAC,mBAAmB,CAAC;AACpC,KAAK;AACL,IAAI,cAAc,EAAE;AACpB,QAAQ,QAAQ,EAAE;AAClB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kDAAkD,CAAC;AAC/E,QAAQ,WAAW,EAAE;AACrB,YAAY,mEAAmE;AAC/E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,yDAAyD;AACrE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yDAAyD;AACrE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAAC;AACjF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,gGAAgG;AAC5G,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,yBAAyB,EAAE;AACnC,YAAY,6FAA6F;AACzG,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,IAAI,EAAE,CAAC,uBAAuB,CAAC;AACvC,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,+CAA+C,CAAC;AAC/E,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,4CAA4C;AACxD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6DAA6D;AACzE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,+FAA+F;AAC3G,SAAS;A
ACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,qCAAqC,CAAC;AAC5D,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,wBAAwB,EAAE,CAAC,mBAAmB,CAAC;AACvD,QAAQ,KAAK,EAAE,CAAC,6BAA6B,CAAC;AAC9C,QAAQ,YAAY,EAAE,CAAC,6BAA6B,CAAC;AACrD,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,oCAAoC,EAAE,CAAC,gCAAgC,CAAC;AAChF,QAAQ,4BAA4B,EAAE,CAAC,qBAAqB,CAAC;AAC7D,QAAQ,kCAAkC,EAAE,CAAC,iBAAiB,CAAC;AAC/D,QAAQ,2BAA2B,EAAE,CAAC,qBAAqB,CAAC;AAC5D,QAAQ,4BAA4B,EAAE,CAAC,oCAAoC,CAAC;AAC5E,QAAQ,kCAAkC,EAAE,CAAC,4BAA4B,CAAC;AAC1E,QAAQ,MAAM,EAAE,CAAC,gCAAgC,CAAC;AAClD,QAAQ,gBAAgB,EAAE,CAAC,WAAW,CAAC;AACvC,QAAQ,aAAa,EAAE,CAAC,uBAAuB,CAAC;AAChD,QAAQ,iBAAiB,EAAE,CAAC,iCAAiC,CAAC;AAC9D,QAAQ,yBAAyB,EAAE,CAAC,iCAAiC,CAAC;AACtE,QAAQ,+BAA+B,EAAE,CAAC,yBAAyB,CAAC;AACpE,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,2BAA2B,EAAE,CAAC,qBAAqB,CAAC;AAC5D,QAAQ,iCAAiC,EAAE,CAAC,qBAAqB,CAAC;AAClE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,2BAA2B,EAAE,CAAC,oBAAoB,CAAC;AAC3D,QAAQ,kBAAkB,EAAE,CAAC,gCAAgC,CAAC;AAC9D,QAAQ,gCAAgC,EAAE,CAAC,yBAAyB,CAAC;AACrE,QAAQ,qBAAqB,EAAE,CAAC,4BAA4B,CAAC;AAC7D,QAAQ,iCAAiC,EAAE,CAAC,gBAAgB,CAAC;AAC7D,QAAQ,yCAAyC,EAAE,CAAC,8BAA8B,CAAC;AACnF,QAAQ,OAAO,EAAE,CAAC,gCAAgC,CAAC;AACnD,QAAQ,QAAQ,EAAE,CAAC,mCAAmC,CAAC;AACvD,QAAQ,mBAAmB,EAAE,CAAC,aAAa,CAAC;AAC5C,KAAK;AACL,CAAC;;AC33CM,MAAM,OAAO,GAAG,mBAAmB,CAAC;;ACApC,SAAS,kBAAkB,CAAC,OAAO,EAAE,YAAY,EAAE;AAC1D,IAAI,MAAM,UAAU,GAAG,EAAE,CAAC;AAC1B,IAAI,KAAK,MAAM,CAAC,KAAK,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;AACnE,QAAQ,KAAK,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;AACxE,YAAY,MAAM,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC,GAAG,QAAQ,CAAC;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACnD,YAAY,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,EAAE,QAAQ,CAAC,CAAC;AAC9E,YAAY,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AACpC,gBAAgB,UAAU,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC;AACvC,aAAa;AACb,YAAY,MAAM,YAAY,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;AACnD,YAAY,IAAI,WAAW,EAAE;AAC7B,gBAAgB,YAAY,CAAC,UAAU,CAAC,GAAG,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,WAAW,CAAC,CAAC;AAC/G,gBAAgB,SAAS;AACzB,aAAa;AACb,YAAY,YAAY,CAAC,UAAU,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;AAClF,SAAS;AACT,KAAK;AACL,IAAI,OAAO,UAAU,CAAC;AACtB,CAAC;AACD,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,WAAW,EAAE;AACrE,IAAI,MAAM,mBAAmB,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACnE;AACA,IAAI,SAAS,eAAe,CAAC,GAAG,IAAI,EAAE;AACtC;AACA,QAAQ,IAAI,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AAClE;AACA,QAAQ,IAAI,WAAW,CAAC,SAAS,EAAE;AACnC,YAAY,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE;AACjD,gBAAgB,IAAI,EAAE,OAAO,CAAC,WAAW,CAAC,SAAS,CAAC;AACpD,gBAAgB,CAAC,WAAW,CAAC,SAAS,GAAG,SAAS;AAClD,aAAa,CAAC,CAAC;AACf,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,CAAC;AAChD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,OAAO,EAAE;AACjC,YAAY,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC,GAAG,WAAW,CAAC,OAAO,CAAC;AAClE,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5H,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,UAAU,EAAE;AACpC,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AACrD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,iBAAiB,EAAE;AAC3C;AACA,YAAY,MAAM,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AACxE,YAAY,KAAK,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,iBAAiB,CAAC,EAAE;AACvF,gBAAgB,IAAI,IAAI,IAAI,OAAO,EAAE;AACr
C,oBAAoB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,uCAAuC,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACzI,oBAAoB,IAAI,EAAE,KAAK,IAAI,OAAO,CAAC,EAAE;AAC7C,wBAAwB,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;AACvD,qBAAqB;AACrB,oBAAoB,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC;AACzC,iBAAiB;AACjB,aAAa;AACb,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,CAAC;AAChD,SAAS;AACT;AACA,QAAQ,OAAO,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5C,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC;AAC/D,CAAC;;ACxDM,SAAS,mBAAmB,CAAC,OAAO,EAAE;AAC7C,IAAI,MAAM,GAAG,GAAG,kBAAkB,CAAC,OAAO,EAAEA,SAAS,CAAC,CAAC;AACvD,IAAI,OAAO;AACX,QAAQ,GAAG,GAAG;AACd,QAAQ,IAAI,EAAE,GAAG;AACjB,KAAK,CAAC;AACN,CAAC;AACD,mBAAmB,CAAC,OAAO,GAAG,OAAO,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/package.json b/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/package.json deleted file mode 100644 index ce60af44..00000000 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "name": "@octokit/plugin-rest-endpoint-methods", - "description": "Octokit plugin adding one method for all of api.github.com REST API endpoints", - "version": "4.15.1", - "license": "MIT", - "files": [ - "dist-*/", - "bin/" - ], - "pika": true, - "sideEffects": false, - "keywords": [ - "github", - "api", - "sdk", - "toolkit" - ], - "repository": "github:octokit/plugin-rest-endpoint-methods.js", - "dependencies": { - "@octokit/types": "^6.13.0", - "deprecation": "^2.3.1" - }, - "peerDependencies": { - "@octokit/core": ">=3" - }, - "devDependencies": { - "@gimenete/type-writer": "^0.1.5", - "@octokit/core": "^3.0.0", - "@octokit/graphql": "^4.3.1", - "@pika/pack": "^0.5.0", - "@pika/plugin-build-node": "^0.9.0", - "@pika/plugin-build-web": "^0.9.0", - "@pika/plugin-ts-standard-pkg": "^0.9.0", - "@types/fetch-mock": "^7.3.1", - "@types/jest": "^26.0.0", - "@types/node": "^14.0.4", - "fetch-mock": "^9.0.0", - "fs-extra": "^9.0.0", - "jest": "^26.1.0", - "lodash.camelcase": "^4.3.0", - "lodash.set": "^4.3.2", - "lodash.upperfirst": "^4.3.1", - "mustache": "^4.0.0", - "npm-run-all": "^4.1.5", - "prettier": "^2.0.1", - "semantic-release": "^17.0.0", - "semantic-release-plugin-update-version-in-files": "^1.0.0", - "sort-keys": "^4.0.0", - "string-to-jsdoc-comment": "^1.0.0", - "ts-jest": "^26.1.3", - "typescript": "^4.0.2" - }, - "publishConfig": { - "access": "public" - }, - "source": "dist-src/index.js", - "types": "dist-types/index.d.ts", - "main": "dist-node/index.js", - "module": "dist-web/index.js" -} diff --git a/node_modules/@actions/github/package.json b/node_modules/@actions/github/package.json index 0288f178..091cebce 100644 --- a/node_modules/@actions/github/package.json +++ b/node_modules/@actions/github/package.json @@ -1,12 +1,12 @@ { "name": "@actions/github", - "version": "4.0.0", + "version": "5.0.0", "description": "Actions github lib", "keywords": [ "github", "actions" ], - "homepage": "https://github.com/actions/toolkit/tree/master/packages/github", + "homepage": "https://github.com/actions/toolkit/tree/main/packages/github", "license": "MIT", "main": "lib/github.js", "types": "lib/github.d.ts", @@ -27,7 +27,7 @@ "directory": "packages/github" }, "scripts": { - "audit-moderate": "npm install && npm audit --audit-level=moderate", + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", "test": 
"jest", "build": "tsc", "format": "prettier --write **/*.ts", @@ -38,13 +38,13 @@ "url": "https://github.com/actions/toolkit/issues" }, "dependencies": { - "@actions/http-client": "^1.0.8", - "@octokit/core": "^3.0.0", - "@octokit/plugin-paginate-rest": "^2.2.3", - "@octokit/plugin-rest-endpoint-methods": "^4.0.0" + "@actions/http-client": "^1.0.11", + "@octokit/core": "^3.4.0", + "@octokit/plugin-paginate-rest": "^2.13.3", + "@octokit/plugin-rest-endpoint-methods": "^5.1.1" }, "devDependencies": { - "jest": "^25.1.0", - "proxy": "^1.0.1" + "jest": "^26.6.3", + "proxy": "^1.0.2" } } diff --git a/node_modules/@nodelib/fs.scandir/.eslintcache b/node_modules/@nodelib/fs.scandir/.eslintcache deleted file mode 100644 index 733e3f40..00000000 --- a/node_modules/@nodelib/fs.scandir/.eslintcache +++ /dev/null @@ -1 +0,0 @@ -[{"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\adapters\\fs.spec.ts":"1","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\adapters\\fs.ts":"2","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\constants.ts":"3","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\index.spec.ts":"4","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\index.ts":"5","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\async.spec.ts":"6","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\async.ts":"7","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\common.spec.ts":"8","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\common.ts":"9","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\sync.spec.ts":"10","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\sync.ts":"11","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\settings.spec.ts":"12","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\settings.ts":"13","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\types\\index.ts":"14","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\utils\\fs.spec.ts":"15","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\utils\\fs.ts":"16","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\utils\\index.ts":"17"},{"size":807,"mtime":1609075886204,"results":"18","hashOfConfig":"19"},{"size":662,"mtime":1609075886204,"results":"20","hashOfConfig":"19"},{"size":629,"mtime":1609075886205,"results":"21","hashOfConfig":"19"},{"size":2048,"mtime":1609075886205,"results":"22","hashOfConfig":"19"},{"size":1618,"mtime":1609075886205,"results":"23","hashOfConfig":"19"},{"size":6536,"mtime":1609075886206,"results":"24","hashOfConfig":"19"},{"size":3511,"mtime":1609075886207,"results":"25","hashOfConfig":"19"},{"size":981,"mtime":1609075886207,"results":"26","hashOfConfig":"19"},{"size":278,"mtime":1609075886208,"results":"27","hashOfConfig":"19"},{"size":5757,"mtime":1609075886208,"results":"28","hashOfConfig":"19"},{"size":1679,"mtime":1609075886209,"results":"29","hashOfConfig":"19"},{"size":940,"mtime":1609075886210,"results":"30","hashOfConfig":"19"},{"size":1210,"mtime":1609075886211,"results":"31","hashOfConfig":"19"},{"size":361,"mtime":1609075886212,"results":"32","hashOfConfig":"19"},{"size":626,"mtime":1609075886213,"results":"33","hashOfConfig":"19"},{"size":949,"mtime":1609075886213,"results":"34","hashOfConfig":"19"},{"size":45,"mtime":1609075886214,"results":"35","hashOfConfig":"19"},{"filePath":"36","messages":"37","errorCount":0,"warningCount":0
,"fixableErrorCount":0,"fixableWarningCount":0},"1uegd6b",{"filePath":"38","messages":"39","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"40","messages":"41","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"42","messages":"43","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"44","messages":"45","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"46","messages":"47","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"48","messages":"49","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"50","messages":"51","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"52","messages":"53","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"54","messages":"55","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"56","messages":"57","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"58","messages":"59","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"60","messages":"61","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"62","messages":"63","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"64","messages":"65","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"66","messages":"67","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"68","messages":"69","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\adapters\\fs.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\adapters\\fs.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\constants.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\index.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\index.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\async.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\async.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\common.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\common.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\sync.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\providers\\sync.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\settings.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\settings.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\types\\index.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\utils\\fs.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\utils\\fs.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.scandir\\src\\utils\\index.ts",[]] \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts index d0adcb4f..827f1db0 100644 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts +++ 
b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts @@ -1,13 +1,20 @@ -/// -import * as fs from 'fs'; -export declare type FileSystemAdapter = { - lstat: typeof fs.lstat; - stat: typeof fs.stat; - lstatSync: typeof fs.lstatSync; - statSync: typeof fs.statSync; - readdir: typeof fs.readdir; - readdirSync: typeof fs.readdirSync; -}; -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; -//# sourceMappingURL=fs.d.ts.map \ No newline at end of file +import type * as fsStat from '@nodelib/fs.stat'; +import type { Dirent, ErrnoException } from '../types'; +export interface ReaddirAsynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; + (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; +} +export interface ReaddirSynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }): Dirent[]; + (filepath: string): string[]; +} +export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { + readdir: ReaddirAsynchronousMethod; + readdirSync: ReaddirSynchronousMethod; +}; +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts.map b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts.map deleted file mode 100644 index 3cc3d1e8..00000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/adapters/fs.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,oBAAY,iBAAiB,GAAG;IAC/B,KAAK,EAAE,OAAO,EAAE,CAAC,KAAK,CAAC;IACvB,IAAI,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC;IACrB,SAAS,EAAE,OAAO,EAAE,CAAC,SAAS,CAAC;IAC/B,QAAQ,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC;IAC7B,OAAO,EAAE,OAAO,EAAE,CAAC,OAAO,CAAC;IAC3B,WAAW,EAAE,OAAO,EAAE,CAAC,WAAW,CAAC;CACnC,CAAC;AAEF,eAAO,MAAM,mBAAmB,EAAE,iBAOjC,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,OAAO,CAAC,iBAAiB,CAAC,GAAG,iBAAiB,CASjG"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js index a1d16ba9..f0fe0220 100644 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js +++ b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js @@ -1,19 +1,19 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +function createFileSystemAdapter(fsMethods) { + 
if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts b/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts deleted file mode 100644 index 2858c4bc..00000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=fs.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts.map b/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts.map deleted file mode 100644 index 8e096bbc..00000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"fs.spec.d.ts","sourceRoot":"","sources":["../../src/adapters/fs.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.js b/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.js deleted file mode 100644 index 1b9031e7..00000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.spec.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -const adapter = require("./fs"); -describe('Adapters → FileSystem', () => { - it('should return original FS methods', () => { - const expected = adapter.FILE_SYSTEM_ADAPTER; - const actual = adapter.createFileSystemAdapter(); - assert.deepStrictEqual(actual, expected); - }); - it('should return custom FS methods', () => { - const customLstatSyncMethod = () => new fs_macchiato_1.Stats(); - const expected = Object.assign(Object.assign({}, adapter.FILE_SYSTEM_ADAPTER), { lstatSync: customLstatSyncMethod }); - const actual = adapter.createFileSystemAdapter({ - lstatSync: customLstatSyncMethod - }); - assert.deepStrictEqual(actual, expected); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/@nodelib/fs.scandir/out/constants.d.ts index bb3f4fde..33f17497 100644 --- a/node_modules/@nodelib/fs.scandir/out/constants.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/constants.d.ts @@ -1,5 +1,4 @@ -/** - * IS `true` for Node.js 10.10 and greater. - */ -export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; -//# sourceMappingURL=constants.d.ts.map \ No newline at end of file +/** + * IS `true` for Node.js 10.10 and greater. 
+ */ +export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.d.ts.map b/node_modules/@nodelib/fs.scandir/out/constants.d.ts.map deleted file mode 100644 index a986bb6f..00000000 --- a/node_modules/@nodelib/fs.scandir/out/constants.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAWA;;GAEG;AACH,eAAO,MAAM,kCAAkC,SAAuD,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/@nodelib/fs.scandir/out/constants.js index 8b6c8d14..7e3d4411 100644 --- a/node_modules/@nodelib/fs.scandir/out/constants.js +++ b/node_modules/@nodelib/fs.scandir/out/constants.js @@ -1,14 +1,17 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; -const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); -const MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); -const MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); -const SUPPORTED_MAJOR_VERSION = 10; -const SUPPORTED_MINOR_VERSION = 10; -const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; -const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; -/** - * IS `true` for Node.js 10.10 and greater. - */ -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; +const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); +if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { + throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); +} +const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); +const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); +const SUPPORTED_MAJOR_VERSION = 10; +const SUPPORTED_MINOR_VERSION = 10; +const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; +const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; +/** + * IS `true` for Node.js 10.10 and greater. 
+ */ +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/@nodelib/fs.scandir/out/index.d.ts index b12b5293..b9da83ed 100644 --- a/node_modules/@nodelib/fs.scandir/out/index.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/index.d.ts @@ -1,13 +1,12 @@ -import { FileSystemAdapter } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import { Dirent, Entry } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function scandir(path: string, callback: AsyncCallback): void; -declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace scandir { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; -export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options }; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Dirent, Entry } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function scandir(path: string, callback: AsyncCallback): void; +declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace scandir { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; +export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/@nodelib/fs.scandir/out/index.d.ts.map b/node_modules/@nodelib/fs.scandir/out/index.d.ts.map deleted file mode 100644 index bc4dec0a..00000000 --- a/node_modules/@nodelib/fs.scandir/out/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAClD,OAAO,KAAK,KAAK,MAAM,mBAAmB,CAAC;AAE3C,OAAO,QAAQ,EAAE,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAC/C,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAExC,aAAK,aAAa,GAAG,KAAK,CAAC,aAAa,CAAC;AAEzC,iBAAS,OAAO,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAAC;AAC9D,iBAAS,OAAO,CAAC,IAAI,EAAE,MAAM,EAAE,iBAAiB,EAAE,OAAO,GAAG,QAAQ,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAAC;AAWrG,OAAO,WAAW,OAAO,CAAC;IACzB,SAAS,aAAa,CAAC,IAAI,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;CAC/F;AAED,iBAAS,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,QAAQ,GAAG,KAAK,EAAE,CAIlF;AAUD,OAAO,EACN,OAAO,EACP,WAAW,EACX,QAAQ,EAIR,aAAa,EACb,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,OAAO,EACP,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/@nodelib/fs.scandir/out/index.js index 653a5109..99c70d3d 100644 --- a/node_modules/@nodelib/fs.scandir/out/index.js +++ b/node_modules/@nodelib/fs.scandir/out/index.js @@ -1,25 +1,26 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.scandirSync = exports.scandir 
= void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function scandir(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} -exports.scandir = scandir; -function scandirSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.scandirSync = scandirSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.scandirSync = exports.scandir = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function scandir(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.scandir = scandir; +function scandirSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.scandirSync = scandirSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.scandir/out/index.spec.d.ts b/node_modules/@nodelib/fs.scandir/out/index.spec.d.ts deleted file mode 100644 index 4e9d2bbe..00000000 --- a/node_modules/@nodelib/fs.scandir/out/index.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=index.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/index.spec.d.ts.map b/node_modules/@nodelib/fs.scandir/out/index.spec.d.ts.map deleted file mode 100644 index 47bd6666..00000000 --- a/node_modules/@nodelib/fs.scandir/out/index.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.spec.d.ts","sourceRoot":"","sources":["../src/index.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/index.spec.js b/node_modules/@nodelib/fs.scandir/out/index.spec.js deleted file mode 100644 index 09773f6b..00000000 --- a/node_modules/@nodelib/fs.scandir/out/index.spec.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fs = require("fs"); -const rimraf = require("rimraf"); -const _1 = require("."); -describe('Package', () => { - before(() => { - rimraf.sync('fixtures'); - fs.mkdirSync('fixtures'); - fs.writeFileSync('fixtures/file.txt', ''); - }); - after(() => { - rimraf.sync('fixtures'); - }); - describe('.scandir', () => { - it('should work without options or settings', (done) => { - _1.scandir('fixtures', (error, entries) => { - assert.strictEqual(error, null); - assert.ok(entries[0].name); - assert.ok(entries[0].path); - assert.ok(entries[0].dirent); - done(); - }); - }); - it('should 
work with options', (done) => { - _1.scandir('fixtures', { stats: true }, (error, entries) => { - assert.strictEqual(error, null); - assert.ok(entries[0].name); - assert.ok(entries[0].path); - assert.ok(entries[0].dirent); - assert.ok(entries[0].stats); - done(); - }); - }); - it('should work with settings', (done) => { - const settings = new _1.Settings({ stats: true }); - _1.scandir('fixtures', settings, (error, entries) => { - assert.strictEqual(error, null); - assert.ok(entries[0].name); - assert.ok(entries[0].path); - assert.ok(entries[0].dirent); - assert.ok(entries[0].stats); - done(); - }); - }); - }); - describe('.scandirSync', () => { - it('should work without options or settings', () => { - const actual = _1.scandirSync('fixtures'); - assert.ok(actual[0].name); - assert.ok(actual[0].path); - assert.ok(actual[0].dirent); - }); - it('should work with options', () => { - const actual = _1.scandirSync('fixtures', { stats: true }); - assert.ok(actual[0].name); - assert.ok(actual[0].path); - assert.ok(actual[0].dirent); - assert.ok(actual[0].stats); - }); - it('should work with settings', () => { - const settings = new _1.Settings({ stats: true }); - const actual = _1.scandirSync('fixtures', settings); - assert.ok(actual[0].name); - assert.ok(actual[0].path); - assert.ok(actual[0].dirent); - assert.ok(actual[0].stats); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts index def26de3..5829676d 100644 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts @@ -1,8 +1,7 @@ -/// -import Settings from '../settings'; -import { Entry } from '../types'; -export declare type AsyncCallback = (err: NodeJS.ErrnoException, entries: Entry[]) => void; -export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; -//# sourceMappingURL=async.d.ts.map \ No newline at end of file +/// +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; +export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts.map b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts.map deleted file mode 100644 index 7f486b5a..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"async.d.ts","sourceRoot":"","sources":["../../src/providers/async.ts"],"names":[],"mappings":";AAIA,OAAO,QAAQ,MAAM,aAAa,CAAC;AACnC,OAAO,EAAE,KAAK,EAAS,MAAM,UAAU,CAAC;AASxC,oBAAY,aAAa,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,IAAI,CAAC;AAEnF,wBAAgB,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAMzF;AAED,wBAAgB,oBAAoB,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CA0BzG;AAwBD,wBAAgB,OAAO,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAsC5F"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/@nodelib/fs.scandir/out/providers/async.js index 182d1af3..e8e2f0a9 100644 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.js +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.js @@ -1,92 +1,104 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const rpl = require("run-parallel"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings, callback) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings, callback); - } - return readdir(directory, settings, callback); -} -exports.read = read; -function readdirWithFileTypes(directory, settings, callback) { - settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { - if (readdirError !== null) { - return callFailureCallback(callback, readdirError); - } - const entries = dirents.map((dirent) => ({ - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - })); - if (!settings.followSymbolicLinks) { - return callSuccessCallback(callback, entries); - } - const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); - rpl(tasks, (rplError, rplEntries) => { - if (rplError !== null) { - return callFailureCallback(callback, rplError); - } - callSuccessCallback(callback, rplEntries); - }); - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function makeRplTaskEntry(entry, settings) { - return (done) => { - if (!entry.dirent.isSymbolicLink()) { - return done(null, entry); - } - settings.fs.stat(entry.path, (statError, stats) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - return done(statError); - } - return done(null, entry); - } - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - return done(null, entry); - }); - }; -} -function readdir(directory, settings, callback) { - settings.fs.readdir(directory, (readdirError, names) => { - if (readdirError !== null) { - return callFailureCallback(callback, readdirError); - } - const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator)); - const tasks = filepaths.map((filepath) => { - return (done) => fsStat.stat(filepath, settings.fsStatSettings, done); - }); - rpl(tasks, (rplError, results) => { - if (rplError !== null) { - return callFailureCallback(callback, rplError); - } - const entries = []; - names.forEach((name, index) => { - const stats = results[index]; - const entry = { - name, - path: filepaths[index], - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - 
entry.stats = stats; - } - entries.push(entry); - }); - callSuccessCallback(callback, entries); - }); - }); -} -exports.readdir = readdir; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const rpl = require("run-parallel"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings, callback) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + readdirWithFileTypes(directory, settings, callback); + return; + } + readdir(directory, settings, callback); +} +exports.read = read; +function readdirWithFileTypes(directory, settings, callback) { + settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const entries = dirents.map((dirent) => ({ + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + })); + if (!settings.followSymbolicLinks) { + callSuccessCallback(callback, entries); + return; + } + const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); + rpl(tasks, (rplError, rplEntries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, rplEntries); + }); + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function makeRplTaskEntry(entry, settings) { + return (done) => { + if (!entry.dirent.isSymbolicLink()) { + done(null, entry); + return; + } + settings.fs.stat(entry.path, (statError, stats) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + done(statError); + return; + } + done(null, entry); + return; + } + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + done(null, entry); + }); + }; +} +function readdir(directory, settings, callback) { + settings.fs.readdir(directory, (readdirError, names) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const tasks = names.map((name) => { + const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + return (done) => { + fsStat.stat(path, settings.fsStatSettings, (error, stats) => { + if (error !== null) { + done(error); + return; + } + const entry = { + name, + path, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + done(null, entry); + }); + }; + }); + rpl(tasks, (rplError, entries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, entries); + }); + }); +} +exports.readdir = readdir; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts deleted file mode 100644 index c3f82484..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=async.spec.d.ts.map \ No newline at end of file diff 
--git a/node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts.map b/node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts.map deleted file mode 100644 index a67f476b..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"async.spec.d.ts","sourceRoot":"","sources":["../../src/providers/async.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.spec.js b/node_modules/@nodelib/fs.scandir/out/providers/async.spec.js deleted file mode 100644 index b41ad709..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.spec.js +++ /dev/null @@ -1,180 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); -const sinon = require("sinon"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -const constants_1 = require("../constants"); -const settings_1 = require("../settings"); -const provider = require("./async"); -const ROOT_PATH = 'root'; -const FIRST_FILE_PATH = 'first.txt'; -const SECOND_FILE_PATH = 'second.txt'; -const FIRST_ENTRY_PATH = path.join(ROOT_PATH, FIRST_FILE_PATH); -const SECOND_ENTRY_PATH = path.join(ROOT_PATH, SECOND_FILE_PATH); -describe('Providers → Async', () => { - describe('.read', () => { - it('should call correct method based on Node.js version', (done) => { - const readdir = sinon.stub(); - readdir.yields(null, []); - const settings = new settings_1.default({ - fs: { readdir: readdir } - }); - provider.read(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(entries, []); - if (constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - sinon.assert.match(readdir.args, [[ROOT_PATH, { withFileTypes: true }, sinon.match.func]]); - } - else { - sinon.assert.match(readdir.args, [[ROOT_PATH, sinon.match.func]]); - } - done(); - }); - }); - it('should always use `readdir` method when the `stats` option is enabled', (done) => { - const readdir = sinon.stub(); - readdir.yields(null, []); - const settings = new settings_1.default({ - fs: { readdir: readdir }, - stats: true - }); - provider.read(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(entries, []); - sinon.assert.match(readdir.args, [[ROOT_PATH, sinon.match.func]]); - done(); - }); - }); - }); - describe('.readdirWithFileTypes', () => { - it('should return entries', (done) => { - const dirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH }); - const readdir = sinon.stub(); - readdir.yields(null, [dirent]); - const settings = new settings_1.default({ - fs: { readdir: readdir } - }); - const expected = [ - { - dirent, - name: FIRST_FILE_PATH, - path: FIRST_ENTRY_PATH - } - ]; - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - sinon.assert.match(readdir.args, [[ROOT_PATH, { withFileTypes: true }, sinon.match.func]]); - assert.deepStrictEqual(entries, expected); - done(); - }); - }); - it('should call fs.stat for symbolic link when the "followSymbolicLink" option is enabled', (done) => { - const firstDirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH }); - const secondDirent = new fs_macchiato_1.Dirent({ name: SECOND_FILE_PATH, isSymbolicLink: true }); - const stats = new fs_macchiato_1.Stats(); - const readdir = sinon.stub(); - const stat = sinon.stub(); - readdir.yields(null, 
[firstDirent, secondDirent]); - stat.yields(null, stats); - const settings = new settings_1.default({ - followSymbolicLinks: true, - fs: { - readdir: readdir, - stat: stat - } - }); - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 2); - assert.ok(!entries[1].dirent.isSymbolicLink()); - sinon.assert.match(stat.args, [[SECOND_ENTRY_PATH, sinon.match.func]]); - done(); - }); - }); - it('should return lstat for broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is disabled', (done) => { - const firstDirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - const readdir = sinon.stub(); - const stat = sinon.stub(); - readdir.yields(null, [firstDirent]); - stat.yields(new Error('error')); - const settings = new settings_1.default({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: false, - fs: { - readdir: readdir, - stat: stat - } - }); - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 1); - assert.ok(entries[0].dirent.isSymbolicLink()); - done(); - }); - }); - it('should throw an error fro broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is enabled', (done) => { - const firstDirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - const readdir = sinon.stub(); - const stat = sinon.stub(); - readdir.yields(null, [firstDirent]); - stat.yields(new Error('error')); - const settings = new settings_1.default({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: true, - fs: { - readdir: readdir, - stat: stat - } - }); - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error.message, 'error'); - assert.strictEqual(entries, undefined); - done(); - }); - }); - }); - describe('.readdir', () => { - it('should return entries', (done) => { - const stats = new fs_macchiato_1.Stats(); - const readdir = sinon.stub(); - const lstat = sinon.stub(); - readdir.yields(null, [FIRST_FILE_PATH]); - lstat.yields(null, stats); - const settings = new settings_1.default({ - fs: { - readdir: readdir, - lstat: lstat - } - }); - provider.readdir(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - sinon.assert.match(readdir.args, [[ROOT_PATH, sinon.match.func]]); - sinon.assert.match(lstat.args, [[FIRST_ENTRY_PATH, sinon.match.func]]); - assert.strictEqual(entries[0].name, FIRST_FILE_PATH); - assert.strictEqual(entries[0].path, FIRST_ENTRY_PATH); - assert.strictEqual(entries[0].dirent.name, FIRST_FILE_PATH); - done(); - }); - }); - it('should return entries with `stats` property', (done) => { - const stats = new fs_macchiato_1.Stats(); - const readdir = sinon.stub(); - const lstat = sinon.stub(); - readdir.yields(null, [FIRST_FILE_PATH]); - lstat.yields(null, stats); - const settings = new settings_1.default({ - fs: { - readdir: readdir, - lstat: lstat - }, - stats: true - }); - provider.readdir(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(entries[0].stats, stats); - done(); - }); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts index cbfb4f3b..2b4d08b5 100644 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts +++ 
b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts @@ -1,2 +1 @@ -export declare function joinPathSegments(a: string, b: string, separator: string): string; -//# sourceMappingURL=common.d.ts.map \ No newline at end of file +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts.map b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts.map deleted file mode 100644 index 18decb20..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"common.d.ts","sourceRoot":"","sources":["../../src/providers/common.ts"],"names":[],"mappings":"AAAA,wBAAgB,gBAAgB,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAShF"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/@nodelib/fs.scandir/out/providers/common.js index 2ce7318f..8724cb59 100644 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.js +++ b/node_modules/@nodelib/fs.scandir/out/providers/common.js @@ -1,13 +1,13 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.joinPathSegments = void 0; -function joinPathSegments(a, b, separator) { - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). - */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = void 0; +function joinPathSegments(a, b, separator) { + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
+ */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts deleted file mode 100644 index c5ccf64b..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=common.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts.map b/node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts.map deleted file mode 100644 index 693b534a..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"common.spec.d.ts","sourceRoot":"","sources":["../../src/providers/common.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.spec.js b/node_modules/@nodelib/fs.scandir/out/providers/common.spec.js deleted file mode 100644 index 8990c5aa..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.spec.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const common = require("./common"); -describe('Readers → Common', () => { - describe('.joinPathSegments', () => { - it('should return concatenated string', () => { - assert.strictEqual(common.joinPathSegments('.', 'a', '/'), './a'); - }); - it('should return correct string when the first segment ens with the separator symbol', () => { - // Unix - assert.strictEqual(common.joinPathSegments('/', 'a', '/'), '/a'); - assert.strictEqual(common.joinPathSegments('//', 'a', '/'), '//a'); - assert.strictEqual(common.joinPathSegments('/a/', 'b', '/'), '/a/b'); - // Windows - assert.strictEqual(common.joinPathSegments('C:/', 'Users', '/'), 'C:/Users'); - assert.strictEqual(common.joinPathSegments('C:\\', 'Users', '\\'), 'C:\\Users'); - assert.strictEqual(common.joinPathSegments('//?/C:/', 'Users', '/'), '//?/C:/Users'); - assert.strictEqual(common.joinPathSegments('\\\\?\\C:\\', 'Users', '\\'), '\\\\?\\C:\\Users'); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts index d6ed468c..e05c8f07 100644 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts @@ -1,6 +1,5 @@ -import Settings from '../settings'; -import { Entry } from '../types'; -export declare function read(directory: string, settings: Settings): Entry[]; -export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; -export declare function readdir(directory: string, settings: Settings): Entry[]; -//# sourceMappingURL=sync.d.ts.map \ No newline at end of file +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare function read(directory: string, settings: Settings): Entry[]; +export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; +export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts.map b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts.map deleted file mode 100644 index ba3de162..00000000 --- 
a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"sync.d.ts","sourceRoot":"","sources":["../../src/providers/sync.ts"],"names":[],"mappings":"AAGA,OAAO,QAAQ,MAAM,aAAa,CAAC;AACnC,OAAO,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAIjC,wBAAgB,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,GAAG,KAAK,EAAE,CAMnE;AAED,wBAAgB,oBAAoB,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,GAAG,KAAK,EAAE,CAwBnF;AAED,wBAAgB,OAAO,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,GAAG,KAAK,EAAE,CAmBtE"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/@nodelib/fs.scandir/out/providers/sync.js index 1b32e386..146db343 100644 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.js +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.js @@ -1,54 +1,54 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings); - } - return readdir(directory, settings); -} -exports.read = read; -function readdirWithFileTypes(directory, settings) { - const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); - return dirents.map((dirent) => { - const entry = { - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - }; - if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { - try { - const stats = settings.fs.statSync(entry.path); - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - } - catch (error) { - if (settings.throwErrorOnBrokenSymbolicLink) { - throw error; - } - } - } - return entry; - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function readdir(directory, settings) { - const names = settings.fs.readdirSync(directory); - return names.map((name) => { - const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - const stats = fsStat.statSync(entryPath, settings.fsStatSettings); - const entry = { - name, - path: entryPath, - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - return entry; - }); -} -exports.readdir = readdir; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + return readdirWithFileTypes(directory, settings); + } + return readdir(directory, settings); +} +exports.read = read; +function readdirWithFileTypes(directory, settings) { + const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); + return dirents.map((dirent) => { + const entry = { + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + }; + if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { + try { + const stats = settings.fs.statSync(entry.path); 
+ entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + } + catch (error) { + if (settings.throwErrorOnBrokenSymbolicLink) { + throw error; + } + } + } + return entry; + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function readdir(directory, settings) { + const names = settings.fs.readdirSync(directory); + return names.map((name) => { + const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + const stats = fsStat.statSync(entryPath, settings.fsStatSettings); + const entry = { + name, + path: entryPath, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + return entry; + }); +} +exports.readdir = readdir; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts deleted file mode 100644 index 5167ab33..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=sync.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts.map b/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts.map deleted file mode 100644 index aae94987..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"sync.spec.d.ts","sourceRoot":"","sources":["../../src/providers/sync.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.js b/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.js deleted file mode 100644 index e6e82d47..00000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.spec.js +++ /dev/null @@ -1,144 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); -const sinon = require("sinon"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -const constants_1 = require("../constants"); -const settings_1 = require("../settings"); -const provider = require("./sync"); -const ROOT_PATH = 'root'; -const FIRST_FILE_PATH = 'first.txt'; -const SECOND_FILE_PATH = 'second.txt'; -const FIRST_ENTRY_PATH = path.join(ROOT_PATH, FIRST_FILE_PATH); -const SECOND_ENTRY_PATH = path.join(ROOT_PATH, SECOND_FILE_PATH); -describe('Providers → Sync', () => { - describe('.read', () => { - it('should call correct method based on Node.js version', () => { - const readdirSync = sinon.stub().returns([]); - const settings = new settings_1.default({ - fs: { readdirSync: readdirSync } - }); - const actual = provider.read(ROOT_PATH, settings); - assert.deepStrictEqual(actual, []); - if (constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH, { withFileTypes: true }]]); - } - else { - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH]]); - } - }); - it('should always use `readdir` method when the `stats` option is enabled', () => { - const readdirSync = sinon.stub().returns([]); - const settings = new settings_1.default({ - fs: { readdirSync: readdirSync }, - stats: true - }); - provider.read(ROOT_PATH, settings); - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH]]); - }); - }); - describe('.readdirWithFileTypes', () => { - it('should return entries', () => { - const dirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH }); - const readdirSync = 
sinon.stub().returns([dirent]); - const settings = new settings_1.default({ - fs: { readdirSync: readdirSync } - }); - const expected = [ - { - dirent, - name: FIRST_FILE_PATH, - path: FIRST_ENTRY_PATH - } - ]; - const actual = provider.readdirWithFileTypes(ROOT_PATH, settings); - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH, { withFileTypes: true }]]); - assert.deepStrictEqual(actual, expected); - }); - it('should call fs.stat for symbolic link when the "followSymbolicLink" option is enabled', () => { - const firstDirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH }); - const secondDirent = new fs_macchiato_1.Dirent({ name: SECOND_FILE_PATH, isSymbolicLink: true }); - const stats = new fs_macchiato_1.Stats(); - const readdirSync = sinon.stub().returns([firstDirent, secondDirent]); - const statSync = sinon.stub().returns(stats); - const settings = new settings_1.default({ - followSymbolicLinks: true, - fs: { - readdirSync: readdirSync, - statSync: statSync - } - }); - const actual = provider.readdirWithFileTypes(ROOT_PATH, settings); - assert.strictEqual(actual.length, 2); - assert.deepStrictEqual(statSync.args, [[SECOND_ENTRY_PATH]]); - assert.ok(!actual[1].dirent.isSymbolicLink()); - }); - it('should return lstat for broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is disabled', () => { - const dirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - const readdirSync = sinon.stub().returns([dirent]); - const statSync = () => { - throw new Error('error'); - }; - const settings = new settings_1.default({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: false, - fs: { - readdirSync: readdirSync, - statSync: statSync - } - }); - const actual = provider.readdirWithFileTypes(ROOT_PATH, settings); - assert.strictEqual(actual.length, 1); - }); - it('should throw an error fro broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is enabled', () => { - const dirent = new fs_macchiato_1.Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - const readdirSync = sinon.stub().returns([dirent]); - const statSync = () => { - throw new Error('error'); - }; - const settings = new settings_1.default({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: true, - fs: { - readdirSync: readdirSync, - statSync: statSync - } - }); - const expectedErrorMessageRe = /Error: error/; - assert.throws(() => provider.readdirWithFileTypes(ROOT_PATH, settings), expectedErrorMessageRe); - }); - }); - describe('.readdir', () => { - it('should return entries', () => { - const stats = new fs_macchiato_1.Stats(); - const readdirSync = sinon.stub().returns([FIRST_FILE_PATH]); - const lstatSync = sinon.stub().returns(stats); - const settings = new settings_1.default({ - fs: { - readdirSync: readdirSync, - lstatSync: lstatSync - } - }); - const actual = provider.readdir(ROOT_PATH, settings); - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH]]); - assert.strictEqual(actual[0].name, FIRST_FILE_PATH); - assert.strictEqual(actual[0].path, FIRST_ENTRY_PATH); - assert.strictEqual(actual[0].dirent.name, FIRST_FILE_PATH); - }); - it('should return entries with `stats` property', () => { - const stats = new fs_macchiato_1.Stats(); - const readdirSync = sinon.stub().returns([FIRST_FILE_PATH]); - const lstatSync = sinon.stub().returns(stats); - const settings = new settings_1.default({ - fs: { - readdirSync: readdirSync, - lstatSync: lstatSync - }, - stats: true - }); - const actual = provider.readdir(ROOT_PATH, settings); - 
assert.deepStrictEqual(actual[0].stats, stats); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/@nodelib/fs.scandir/out/settings.d.ts index 175f7c11..a0db1155 100644 --- a/node_modules/@nodelib/fs.scandir/out/settings.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/settings.d.ts @@ -1,21 +1,20 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as fs from './adapters/fs'; -export declare type Options = { - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -}; -export default class Settings { - private readonly _options; - readonly followSymbolicLinks: boolean; - readonly fs: fs.FileSystemAdapter; - readonly pathSegmentSeparator: string; - readonly stats: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - readonly fsStatSettings: fsStat.Settings; - constructor(_options?: Options); - private _getValue; -} -//# sourceMappingURL=settings.d.ts.map \ No newline at end of file +import * as fsStat from '@nodelib/fs.stat'; +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLinks: boolean; + readonly fs: fs.FileSystemAdapter; + readonly pathSegmentSeparator: string; + readonly stats: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly fsStatSettings: fsStat.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.scandir/out/settings.d.ts.map b/node_modules/@nodelib/fs.scandir/out/settings.d.ts.map deleted file mode 100644 index afc1803f..00000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"settings.d.ts","sourceRoot":"","sources":["../src/settings.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,MAAM,MAAM,kBAAkB,CAAC;AAE3C,OAAO,KAAK,EAAE,MAAM,eAAe,CAAC;AAEpC,oBAAY,OAAO,GAAG;IACrB,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,EAAE,CAAC,EAAE,OAAO,CAAC,EAAE,CAAC,iBAAiB,CAAC,CAAC;IACnC,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,8BAA8B,CAAC,EAAE,OAAO,CAAC;CACzC,CAAC;AAEF,MAAM,CAAC,OAAO,OAAO,QAAQ;IAahB,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAZrC,SAAgB,mBAAmB,EAAE,OAAO,CAA4D;IACxG,SAAgB,EAAE,EAAE,EAAE,CAAC,iBAAiB,CAAgD;IACxF,SAAgB,oBAAoB,EAAE,MAAM,CAAgE;IAC5G,SAAgB,KAAK,EAAE,OAAO,CAA8C;IAC5E,SAAgB,8BAA8B,EAAE,OAAO,CAAsE;IAE7H,SAAgB,cAAc,EAAE,MAAM,CAAC,QAAQ,CAI5C;gBAE0B,QAAQ,GAAE,OAAY;IAEnD,OAAO,CAAC,SAAS;CAGjB"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/@nodelib/fs.scandir/out/settings.js index 700d2e94..15a3e8cd 100644 --- a/node_modules/@nodelib/fs.scandir/out/settings.js +++ b/node_modules/@nodelib/fs.scandir/out/settings.js @@ -1,24 +1,24 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsStat = require("@nodelib/fs.stat"); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.stats = this._getValue(this._options.stats, false); - 
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - this.fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this.followSymbolicLinks, - fs: this.fs, - throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? option : value; - } -} -exports.default = Settings; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsStat = require("@nodelib/fs.stat"); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.stats = this._getValue(this._options.stats, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + this.fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this.followSymbolicLinks, + fs: this.fs, + throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts b/node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts deleted file mode 100644 index ff5bc0fd..00000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=settings.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts.map b/node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts.map deleted file mode 100644 index b69e1aa7..00000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"settings.spec.d.ts","sourceRoot":"","sources":["../src/settings.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/settings.spec.js b/node_modules/@nodelib/fs.scandir/out/settings.spec.js deleted file mode 100644 index c5b442a9..00000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.spec.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); -const fs_macchiato_1 = require("../../fs.macchiato"); -const fs = require("./adapters/fs"); -const settings_1 = require("./settings"); -describe('Settings', () => { - it('should return instance with default values', () => { - const settings = new settings_1.default(); - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter()); - assert.ok(!settings.followSymbolicLinks); - assert.ok(!settings.stats); - assert.strictEqual(settings.pathSegmentSeparator, path.sep); - assert.ok(settings.fsStatSettings); - assert.ok(settings.throwErrorOnBrokenSymbolicLink); - }); - it('should return instance with custom values', () => { - const lstatSync = () => new fs_macchiato_1.Stats(); - const settings = new settings_1.default({ - fs: fs.createFileSystemAdapter({ lstatSync }), - stats: true - }); - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter({ lstatSync })); - assert.ok(settings.stats); - }); -}); diff --git 
a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts index 50e4b8fc..f326c5e5 100644 --- a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts @@ -1,20 +1,20 @@ -/// -import * as fs from 'fs'; -export declare type Entry = { - dirent: Dirent; - name: string; - path: string; - stats?: Stats; -}; -export declare type Stats = fs.Stats; -export declare type Dirent = { - isBlockDevice(): boolean; - isCharacterDevice(): boolean; - isDirectory(): boolean; - isFIFO(): boolean; - isFile(): boolean; - isSocket(): boolean; - isSymbolicLink(): boolean; - name: string; -}; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +/// +import type * as fs from 'fs'; +export interface Entry { + dirent: Dirent; + name: string; + path: string; + stats?: Stats; +} +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; +export interface Dirent { + isBlockDevice: () => boolean; + isCharacterDevice: () => boolean; + isDirectory: () => boolean; + isFIFO: () => boolean; + isFile: () => boolean; + isSocket: () => boolean; + isSymbolicLink: () => boolean; + name: string; +} diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts.map b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts.map deleted file mode 100644 index 4147dda4..00000000 --- a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,oBAAY,KAAK,GAAG;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,CAAC,EAAE,KAAK,CAAC;CACd,CAAC;AAEF,oBAAY,KAAK,GAAG,EAAE,CAAC,KAAK,CAAC;AAE7B,oBAAY,MAAM,GAAG;IACpB,aAAa,IAAI,OAAO,CAAC;IACzB,iBAAiB,IAAI,OAAO,CAAC;IAC7B,WAAW,IAAI,OAAO,CAAC;IACvB,MAAM,IAAI,OAAO,CAAC;IAClB,MAAM,IAAI,OAAO,CAAC;IAClB,QAAQ,IAAI,OAAO,CAAC;IACpB,cAAc,IAAI,OAAO,CAAC;IAC1B,IAAI,EAAE,MAAM,CAAC;CACb,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/@nodelib/fs.scandir/out/types/index.js index ce03781e..c8ad2e54 100644 --- a/node_modules/@nodelib/fs.scandir/out/types/index.js +++ b/node_modules/@nodelib/fs.scandir/out/types/index.js @@ -1,2 +1,2 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts index 8aef0084..bb863f15 100644 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts @@ -1,3 +1,2 @@ -import { Dirent, Stats } from '../types'; -export declare function createDirentFromStats(name: string, stats: Stats): Dirent; -//# sourceMappingURL=fs.d.ts.map \ No newline at end of file +import type { Dirent, Stats } from '../types'; +export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts.map b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts.map deleted file mode 100644 index 57093d77..00000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/utils/fs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAsBzC,wBAAgB,qBAAqB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,GAAG,MAAM,CAExE"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/@nodelib/fs.scandir/out/utils/fs.js index f15b8cf2..ace7c74d 100644 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.js +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.js @@ -1,19 +1,19 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts deleted file mode 100644 index 2858c4bc..00000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=fs.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts.map b/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts.map deleted file mode 100644 index 262a0648..00000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"fs.spec.d.ts","sourceRoot":"","sources":["../../src/utils/fs.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.js b/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.js deleted file mode 100644 index ea8f426f..00000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.spec.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -const util = require("./fs"); -describe('Utils → FS', () => { - describe('.createDirentFromStats', () => { - it('should convert fs.Stats to fs.Dirent', () => { - const actual = util.createDirentFromStats('name', new fs_macchiato_1.Stats()); - assert.strictEqual(actual.name, 'name'); - assert.ok(!actual.isBlockDevice()); - assert.ok(!actual.isCharacterDevice()); - 
assert.ok(!actual.isDirectory()); - assert.ok(!actual.isFIFO()); - assert.ok(actual.isFile()); - assert.ok(!actual.isSocket()); - assert.ok(!actual.isSymbolicLink()); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts index 5ecd8948..1b41954e 100644 --- a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts @@ -1,3 +1,2 @@ -import * as fs from './fs'; -export { fs }; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +import * as fs from './fs'; +export { fs }; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts.map b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts.map deleted file mode 100644 index 2f98e7ec..00000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,MAAM,CAAC;AAE3B,OAAO,EACN,EAAE,EACF,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.js b/node_modules/@nodelib/fs.scandir/out/utils/index.js index f03f4ade..f5de129f 100644 --- a/node_modules/@nodelib/fs.scandir/out/utils/index.js +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.js @@ -1,5 +1,5 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fs = void 0; -const fs = require("./fs"); -exports.fs = fs; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fs = void 0; +const fs = require("./fs"); +exports.fs = fs; diff --git a/node_modules/@nodelib/fs.scandir/package.json b/node_modules/@nodelib/fs.scandir/package.json index 38fd05ed..d3a89241 100644 --- a/node_modules/@nodelib/fs.scandir/package.json +++ b/node_modules/@nodelib/fs.scandir/package.json @@ -1,6 +1,6 @@ { "name": "@nodelib/fs.scandir", - "version": "2.1.4", + "version": "2.1.5", "description": "List files and directories inside the specified directory", "license": "MIT", "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", @@ -16,6 +16,11 @@ "engines": { "node": ">= 8" }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], "main": "out/index.js", "typings": "out/index.d.ts", "scripts": { @@ -28,8 +33,12 @@ "watch": "npm run clean && npm run compile:watch" }, "dependencies": { - "@nodelib/fs.stat": "2.0.4", + "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" }, - "gitHead": "cb5f7e893a986164c3b847a4f1faef6c54cadd68" + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4", + "@types/run-parallel": "^1.1.0" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" } diff --git a/node_modules/@nodelib/fs.scandir/src/adapters/fs.spec.ts b/node_modules/@nodelib/fs.scandir/src/adapters/fs.spec.ts deleted file mode 100644 index e274bc03..00000000 --- a/node_modules/@nodelib/fs.scandir/src/adapters/fs.spec.ts +++ /dev/null @@ -1,31 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; - -import { Stats } from '../../../fs.macchiato'; - -import * as adapter from './fs'; - -describe('Adapters → FileSystem', () => { - it('should return original FS methods', () => { - const expected: adapter.FileSystemAdapter = adapter.FILE_SYSTEM_ADAPTER; - - const actual = adapter.createFileSystemAdapter(); - - assert.deepStrictEqual(actual, expected); - }); - - it('should return custom FS methods', () => { - const 
customLstatSyncMethod: typeof fs.lstatSync = () => new Stats(); - - const expected: adapter.FileSystemAdapter = { - ...adapter.FILE_SYSTEM_ADAPTER, - lstatSync: customLstatSyncMethod - }; - - const actual = adapter.createFileSystemAdapter({ - lstatSync: customLstatSyncMethod - }); - - assert.deepStrictEqual(actual, expected); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/src/adapters/fs.ts b/node_modules/@nodelib/fs.scandir/src/adapters/fs.ts deleted file mode 100644 index c604fe8a..00000000 --- a/node_modules/@nodelib/fs.scandir/src/adapters/fs.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as fs from 'fs'; - -export type FileSystemAdapter = { - lstat: typeof fs.lstat; - stat: typeof fs.stat; - lstatSync: typeof fs.lstatSync; - statSync: typeof fs.statSync; - readdir: typeof fs.readdir; - readdirSync: typeof fs.readdirSync; -}; - -export const FILE_SYSTEM_ADAPTER: FileSystemAdapter = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; - -export function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter { - if (fsMethods === undefined) { - return FILE_SYSTEM_ADAPTER; - } - - return { - ...FILE_SYSTEM_ADAPTER, - ...fsMethods - }; -} diff --git a/node_modules/@nodelib/fs.scandir/src/constants.ts b/node_modules/@nodelib/fs.scandir/src/constants.ts deleted file mode 100644 index 6ce0b766..00000000 --- a/node_modules/@nodelib/fs.scandir/src/constants.ts +++ /dev/null @@ -1,15 +0,0 @@ -const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); - -const MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); -const MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); - -const SUPPORTED_MAJOR_VERSION = 10; -const SUPPORTED_MINOR_VERSION = 10; - -const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; -const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; - -/** - * IS `true` for Node.js 10.10 and greater. 
- */ -export const IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/@nodelib/fs.scandir/src/index.spec.ts b/node_modules/@nodelib/fs.scandir/src/index.spec.ts deleted file mode 100644 index ff979321..00000000 --- a/node_modules/@nodelib/fs.scandir/src/index.spec.ts +++ /dev/null @@ -1,85 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; - -import * as rimraf from 'rimraf'; - -import { scandir, scandirSync, Settings } from '.'; - -describe('Package', () => { - before(() => { - rimraf.sync('fixtures'); - - fs.mkdirSync('fixtures'); - fs.writeFileSync('fixtures/file.txt', ''); - }); - - after(() => { - rimraf.sync('fixtures'); - }); - - describe('.scandir', () => { - it('should work without options or settings', (done) => { - scandir('fixtures', (error, entries) => { - assert.strictEqual(error, null); - assert.ok(entries[0].name); - assert.ok(entries[0].path); - assert.ok(entries[0].dirent); - done(); - }); - }); - - it('should work with options', (done) => { - scandir('fixtures', { stats: true }, (error, entries) => { - assert.strictEqual(error, null); - assert.ok(entries[0].name); - assert.ok(entries[0].path); - assert.ok(entries[0].dirent); - assert.ok(entries[0].stats); - done(); - }); - }); - - it('should work with settings', (done) => { - const settings = new Settings({ stats: true }); - - scandir('fixtures', settings, (error, entries) => { - assert.strictEqual(error, null); - assert.ok(entries[0].name); - assert.ok(entries[0].path); - assert.ok(entries[0].dirent); - assert.ok(entries[0].stats); - done(); - }); - }); - }); - - describe('.scandirSync', () => { - it('should work without options or settings', () => { - const actual = scandirSync('fixtures'); - - assert.ok(actual[0].name); - assert.ok(actual[0].path); - assert.ok(actual[0].dirent); - }); - - it('should work with options', () => { - const actual = scandirSync('fixtures', { stats: true }); - - assert.ok(actual[0].name); - assert.ok(actual[0].path); - assert.ok(actual[0].dirent); - assert.ok(actual[0].stats); - }); - - it('should work with settings', () => { - const settings = new Settings({ stats: true }); - - const actual = scandirSync('fixtures', settings); - - assert.ok(actual[0].name); - assert.ok(actual[0].path); - assert.ok(actual[0].dirent); - assert.ok(actual[0].stats); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/src/index.ts b/node_modules/@nodelib/fs.scandir/src/index.ts deleted file mode 100644 index 01470407..00000000 --- a/node_modules/@nodelib/fs.scandir/src/index.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { FileSystemAdapter } from './adapters/fs'; -import * as async from './providers/async'; -import * as sync from './providers/sync'; -import Settings, { Options } from './settings'; -import { Dirent, Entry } from './types'; - -type AsyncCallback = async.AsyncCallback; - -function scandir(path: string, callback: AsyncCallback): void; -function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -function scandir(path: string, optionsOrSettingsOrCallback: Options | Settings | AsyncCallback, callback?: AsyncCallback): void { - if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); - } - - async.read(path, getSettings(optionsOrSettingsOrCallback), callback as AsyncCallback); -} - -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -declare 
namespace scandir { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} - -function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[] { - const settings = getSettings(optionsOrSettings); - - return sync.read(path, settings); -} - -function getSettings(settingsOrOptions: Settings | Options = {}): Settings { - if (settingsOrOptions instanceof Settings) { - return settingsOrOptions; - } - - return new Settings(settingsOrOptions); -} - -export { - scandir, - scandirSync, - Settings, - - // https://github.com/typescript-eslint/typescript-eslint/issues/131 - // eslint-disable-next-line no-undef - AsyncCallback, - Dirent, - Entry, - FileSystemAdapter, - Options -}; diff --git a/node_modules/@nodelib/fs.scandir/src/providers/async.spec.ts b/node_modules/@nodelib/fs.scandir/src/providers/async.spec.ts deleted file mode 100644 index 8861d417..00000000 --- a/node_modules/@nodelib/fs.scandir/src/providers/async.spec.ts +++ /dev/null @@ -1,236 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; -import * as path from 'path'; - -import * as sinon from 'sinon'; - -import { Dirent, Stats } from '../../../fs.macchiato'; -import { IS_SUPPORT_READDIR_WITH_FILE_TYPES } from '../constants'; -import Settings from '../settings'; -import { Entry } from '../types'; -import * as provider from './async'; - -const ROOT_PATH = 'root'; -const FIRST_FILE_PATH = 'first.txt'; -const SECOND_FILE_PATH = 'second.txt'; -const FIRST_ENTRY_PATH = path.join(ROOT_PATH, FIRST_FILE_PATH); -const SECOND_ENTRY_PATH = path.join(ROOT_PATH, SECOND_FILE_PATH); - -describe('Providers → Async', () => { - describe('.read', () => { - it('should call correct method based on Node.js version', (done) => { - const readdir = sinon.stub(); - - readdir.yields(null, []); - - const settings = new Settings({ - fs: { readdir: readdir as unknown as typeof fs.readdir } - }); - - provider.read(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - - assert.deepStrictEqual(entries, []); - - if (IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - sinon.assert.match(readdir.args, [[ROOT_PATH, { withFileTypes: true }, sinon.match.func]]); - } else { - sinon.assert.match(readdir.args, [[ROOT_PATH, sinon.match.func]]); - } - - done(); - }); - }); - - it('should always use `readdir` method when the `stats` option is enabled', (done) => { - const readdir = sinon.stub(); - - readdir.yields(null, []); - - const settings = new Settings({ - fs: { readdir: readdir as unknown as typeof fs.readdir }, - stats: true - }); - - provider.read(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - - assert.deepStrictEqual(entries, []); - sinon.assert.match(readdir.args, [[ROOT_PATH, sinon.match.func]]); - - done(); - }); - }); - }); - - describe('.readdirWithFileTypes', () => { - it('should return entries', (done) => { - const dirent = new Dirent({ name: FIRST_FILE_PATH }); - const readdir = sinon.stub(); - - readdir.yields(null, [dirent]); - - const settings = new Settings({ - fs: { readdir: readdir as unknown as typeof fs.readdir } - }); - - const expected: Entry[] = [ - { - dirent, - name: FIRST_FILE_PATH, - path: FIRST_ENTRY_PATH - } - ]; - - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - - sinon.assert.match(readdir.args, [[ROOT_PATH, { withFileTypes: true }, sinon.match.func]]); - assert.deepStrictEqual(entries, expected); - - done(); - }); - }); - - it('should call fs.stat for symbolic 
link when the "followSymbolicLink" option is enabled', (done) => { - const firstDirent = new Dirent({ name: FIRST_FILE_PATH }); - const secondDirent = new Dirent({ name: SECOND_FILE_PATH, isSymbolicLink: true }); - const stats = new Stats(); - - const readdir = sinon.stub(); - const stat = sinon.stub(); - - readdir.yields(null, [firstDirent, secondDirent]); - stat.yields(null, stats); - - const settings = new Settings({ - followSymbolicLinks: true, - fs: { - readdir: readdir as unknown as typeof fs.readdir, - stat: stat as unknown as typeof fs.stat - } - }); - - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - - assert.strictEqual(entries.length, 2); - assert.ok(!entries[1].dirent.isSymbolicLink()); - sinon.assert.match(stat.args, [[SECOND_ENTRY_PATH, sinon.match.func]]); - - done(); - }); - }); - - it('should return lstat for broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is disabled', (done) => { - const firstDirent = new Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - - const readdir = sinon.stub(); - const stat = sinon.stub(); - - readdir.yields(null, [firstDirent]); - stat.yields(new Error('error')); - - const settings = new Settings({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: false, - fs: { - readdir: readdir as unknown as typeof fs.readdir, - stat: stat as unknown as typeof fs.stat - } - }); - - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - - assert.strictEqual(entries.length, 1); - assert.ok(entries[0].dirent.isSymbolicLink()); - - done(); - }); - }); - - it('should throw an error fro broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is enabled', (done) => { - const firstDirent = new Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - - const readdir = sinon.stub(); - const stat = sinon.stub(); - - readdir.yields(null, [firstDirent]); - stat.yields(new Error('error')); - - const settings = new Settings({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: true, - fs: { - readdir: readdir as unknown as typeof fs.readdir, - stat: stat as unknown as typeof fs.stat - } - }); - - provider.readdirWithFileTypes(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error.message, 'error'); - assert.strictEqual(entries, undefined); - - done(); - }); - }); - }); - - describe('.readdir', () => { - it('should return entries', (done) => { - const stats = new Stats(); - - const readdir = sinon.stub(); - const lstat = sinon.stub(); - - readdir.yields(null, [FIRST_FILE_PATH]); - lstat.yields(null, stats); - - const settings = new Settings({ - fs: { - readdir: readdir as unknown as typeof fs.readdir, - lstat: lstat as unknown as typeof fs.lstat - } - }); - - provider.readdir(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - - sinon.assert.match(readdir.args, [[ROOT_PATH, sinon.match.func]]); - sinon.assert.match(lstat.args, [[FIRST_ENTRY_PATH, sinon.match.func]]); - - assert.strictEqual(entries[0].name, FIRST_FILE_PATH); - assert.strictEqual(entries[0].path, FIRST_ENTRY_PATH); - assert.strictEqual(entries[0].dirent.name, FIRST_FILE_PATH); - - done(); - }); - }); - - it('should return entries with `stats` property', (done) => { - const stats = new Stats(); - - const readdir = sinon.stub(); - const lstat = sinon.stub(); - - readdir.yields(null, [FIRST_FILE_PATH]); - lstat.yields(null, stats); - - const settings = new Settings({ - fs: { - readdir: 
readdir as unknown as typeof fs.readdir, - lstat: lstat as unknown as typeof fs.lstat - }, - stats: true - }); - - provider.readdir(ROOT_PATH, settings, (error, entries) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(entries[0].stats, stats); - - done(); - }); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/src/providers/async.ts b/node_modules/@nodelib/fs.scandir/src/providers/async.ts deleted file mode 100644 index 1ab93a20..00000000 --- a/node_modules/@nodelib/fs.scandir/src/providers/async.ts +++ /dev/null @@ -1,121 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as rpl from 'run-parallel'; - -import { IS_SUPPORT_READDIR_WITH_FILE_TYPES } from '../constants'; -import Settings from '../settings'; -import { Entry, Stats } from '../types'; -import * as utils from '../utils'; -import * as common from './common'; - -type RplTaskStats = rpl.Task; -type RplTaskEntry = rpl.Task; -type FailureCallback = (err: NodeJS.ErrnoException) => void; -type SuccessCallback = (err: null, entries: Entry[]) => void; - -export type AsyncCallback = (err: NodeJS.ErrnoException, entries: Entry[]) => void; - -export function read(directory: string, settings: Settings, callback: AsyncCallback): void { - if (!settings.stats && IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings, callback); - } - - return readdir(directory, settings, callback); -} - -export function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void { - settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { - if (readdirError !== null) { - return callFailureCallback(callback, readdirError); - } - - const entries: Entry[] = dirents.map((dirent) => ({ - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - })); - - if (!settings.followSymbolicLinks) { - return callSuccessCallback(callback, entries); - } - - const tasks: RplTaskEntry[] = entries.map((entry) => makeRplTaskEntry(entry, settings)); - - rpl(tasks, (rplError: Error | null, rplEntries) => { - if (rplError !== null) { - return callFailureCallback(callback, rplError); - } - - callSuccessCallback(callback, rplEntries); - }); - }); -} - -function makeRplTaskEntry(entry: Entry, settings: Settings): RplTaskEntry { - return (done) => { - if (!entry.dirent.isSymbolicLink()) { - return done(null, entry); - } - - settings.fs.stat(entry.path, (statError, stats) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - return done(statError); - } - - return done(null, entry); - } - - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - - return done(null, entry); - }); - }; -} - -export function readdir(directory: string, settings: Settings, callback: AsyncCallback): void { - settings.fs.readdir(directory, (readdirError, names) => { - if (readdirError !== null) { - return callFailureCallback(callback, readdirError); - } - - const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator)); - - const tasks: RplTaskStats[] = filepaths.map((filepath): RplTaskStats => { - return (done) => fsStat.stat(filepath, settings.fsStatSettings, done); - }); - - rpl(tasks, (rplError: Error | null, results) => { - if (rplError !== null) { - return callFailureCallback(callback, rplError); - } - - const entries: Entry[] = []; - - names.forEach((name, index) => { - const stats = results[index]; - - const entry: Entry = { - 
name, - path: filepaths[index], - dirent: utils.fs.createDirentFromStats(name, stats) - }; - - if (settings.stats) { - entry.stats = stats; - } - - entries.push(entry); - }); - - callSuccessCallback(callback, entries); - }); - }); -} - -function callFailureCallback(callback: AsyncCallback, error: NodeJS.ErrnoException): void { - (callback as FailureCallback)(error); -} - -function callSuccessCallback(callback: AsyncCallback, result: Entry[]): void { - (callback as unknown as SuccessCallback)(null, result); -} diff --git a/node_modules/@nodelib/fs.scandir/src/providers/common.spec.ts b/node_modules/@nodelib/fs.scandir/src/providers/common.spec.ts deleted file mode 100644 index 4c4b3986..00000000 --- a/node_modules/@nodelib/fs.scandir/src/providers/common.spec.ts +++ /dev/null @@ -1,24 +0,0 @@ -import * as assert from 'assert'; - -import * as common from './common'; - -describe('Readers → Common', () => { - describe('.joinPathSegments', () => { - it('should return concatenated string', () => { - assert.strictEqual(common.joinPathSegments('.', 'a', '/'), './a'); - }); - - it('should return correct string when the first segment ens with the separator symbol', () => { - // Unix - assert.strictEqual(common.joinPathSegments('/', 'a', '/'), '/a'); - assert.strictEqual(common.joinPathSegments('//', 'a', '/'), '//a'); - assert.strictEqual(common.joinPathSegments('/a/', 'b', '/'), '/a/b'); - - // Windows - assert.strictEqual(common.joinPathSegments('C:/', 'Users', '/'), 'C:/Users'); - assert.strictEqual(common.joinPathSegments('C:\\', 'Users', '\\'), 'C:\\Users'); - assert.strictEqual(common.joinPathSegments('//?/C:/', 'Users', '/'), '//?/C:/Users'); - assert.strictEqual(common.joinPathSegments('\\\\?\\C:\\', 'Users', '\\'), '\\\\?\\C:\\Users'); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/src/providers/common.ts b/node_modules/@nodelib/fs.scandir/src/providers/common.ts deleted file mode 100644 index e13e8060..00000000 --- a/node_modules/@nodelib/fs.scandir/src/providers/common.ts +++ /dev/null @@ -1,10 +0,0 @@ -export function joinPathSegments(a: string, b: string, separator: string): string { - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
- */ - if (a.endsWith(separator)) { - return a + b; - } - - return a + separator + b; -} diff --git a/node_modules/@nodelib/fs.scandir/src/providers/sync.spec.ts b/node_modules/@nodelib/fs.scandir/src/providers/sync.spec.ts deleted file mode 100644 index 4b7f9b4e..00000000 --- a/node_modules/@nodelib/fs.scandir/src/providers/sync.spec.ts +++ /dev/null @@ -1,186 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; -import * as path from 'path'; - -import * as sinon from 'sinon'; - -import { Dirent, Stats } from '../../../fs.macchiato'; -import { IS_SUPPORT_READDIR_WITH_FILE_TYPES } from '../constants'; -import Settings from '../settings'; -import { Entry } from '../types'; -import * as provider from './sync'; - -const ROOT_PATH = 'root'; -const FIRST_FILE_PATH = 'first.txt'; -const SECOND_FILE_PATH = 'second.txt'; -const FIRST_ENTRY_PATH = path.join(ROOT_PATH, FIRST_FILE_PATH); -const SECOND_ENTRY_PATH = path.join(ROOT_PATH, SECOND_FILE_PATH); - -describe('Providers → Sync', () => { - describe('.read', () => { - it('should call correct method based on Node.js version', () => { - const readdirSync = sinon.stub().returns([]); - - const settings = new Settings({ - fs: { readdirSync: readdirSync as unknown as typeof fs.readdirSync } - }); - - const actual = provider.read(ROOT_PATH, settings); - - assert.deepStrictEqual(actual, []); - - if (IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH, { withFileTypes: true }]]); - } else { - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH]]); - } - }); - - it('should always use `readdir` method when the `stats` option is enabled', () => { - const readdirSync = sinon.stub().returns([]); - - const settings = new Settings({ - fs: { readdirSync: readdirSync as unknown as typeof fs.readdirSync }, - stats: true - }); - - provider.read(ROOT_PATH, settings); - - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH]]); - }); - }); - - describe('.readdirWithFileTypes', () => { - it('should return entries', () => { - const dirent = new Dirent({ name: FIRST_FILE_PATH }); - const readdirSync = sinon.stub().returns([dirent]); - - const settings = new Settings({ - fs: { readdirSync: readdirSync as unknown as typeof fs.readdirSync } - }); - - const expected: Entry[] = [ - { - dirent, - name: FIRST_FILE_PATH, - path: FIRST_ENTRY_PATH - } - ]; - - const actual = provider.readdirWithFileTypes(ROOT_PATH, settings); - - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH, { withFileTypes: true }]]); - assert.deepStrictEqual(actual, expected); - }); - - it('should call fs.stat for symbolic link when the "followSymbolicLink" option is enabled', () => { - const firstDirent = new Dirent({ name: FIRST_FILE_PATH }); - const secondDirent = new Dirent({ name: SECOND_FILE_PATH, isSymbolicLink: true }); - const stats = new Stats(); - - const readdirSync = sinon.stub().returns([firstDirent, secondDirent]); - const statSync = sinon.stub().returns(stats); - - const settings = new Settings({ - followSymbolicLinks: true, - fs: { - readdirSync: readdirSync as unknown as typeof fs.readdirSync, - statSync: statSync as unknown as typeof fs.statSync - } - }); - - const actual = provider.readdirWithFileTypes(ROOT_PATH, settings); - - assert.strictEqual(actual.length, 2); - assert.deepStrictEqual(statSync.args, [[SECOND_ENTRY_PATH]]); - assert.ok(!actual[1].dirent.isSymbolicLink()); - }); - - it('should return lstat for broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is disabled', () => { - const dirent = new 
Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - - const readdirSync = sinon.stub().returns([dirent]); - const statSync = (): never => { - throw new Error('error'); - }; - - const settings = new Settings({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: false, - fs: { - readdirSync: readdirSync as unknown as typeof fs.readdirSync, - statSync: statSync as unknown as typeof fs.statSync - } - }); - - const actual = provider.readdirWithFileTypes(ROOT_PATH, settings); - - assert.strictEqual(actual.length, 1); - }); - - it('should throw an error fro broken symbolic link when the "throwErrorOnBrokenSymbolicLink" option is enabled', () => { - const dirent = new Dirent({ name: FIRST_FILE_PATH, isSymbolicLink: true }); - - const readdirSync = sinon.stub().returns([dirent]); - const statSync = (): never => { - throw new Error('error'); - }; - - const settings = new Settings({ - followSymbolicLinks: true, - throwErrorOnBrokenSymbolicLink: true, - fs: { - readdirSync: readdirSync as unknown as typeof fs.readdirSync, - statSync: statSync as unknown as typeof fs.statSync - } - }); - - const expectedErrorMessageRe = /Error: error/; - - assert.throws(() => provider.readdirWithFileTypes(ROOT_PATH, settings), expectedErrorMessageRe); - }); - }); - - describe('.readdir', () => { - it('should return entries', () => { - const stats = new Stats(); - - const readdirSync = sinon.stub().returns([FIRST_FILE_PATH]); - const lstatSync = sinon.stub().returns(stats); - - const settings = new Settings({ - fs: { - readdirSync: readdirSync as unknown as typeof fs.readdirSync, - lstatSync: lstatSync as unknown as typeof fs.lstatSync - } - }); - - const actual = provider.readdir(ROOT_PATH, settings); - - assert.deepStrictEqual(readdirSync.args, [[ROOT_PATH]]); - - assert.strictEqual(actual[0].name, FIRST_FILE_PATH); - assert.strictEqual(actual[0].path, FIRST_ENTRY_PATH); - assert.strictEqual(actual[0].dirent.name, FIRST_FILE_PATH); - }); - - it('should return entries with `stats` property', () => { - const stats = new Stats(); - - const readdirSync = sinon.stub().returns([FIRST_FILE_PATH]); - const lstatSync = sinon.stub().returns(stats); - - const settings = new Settings({ - fs: { - readdirSync: readdirSync as unknown as typeof fs.readdirSync, - lstatSync: lstatSync as unknown as typeof fs.lstatSync - }, - stats: true - }); - - const actual = provider.readdir(ROOT_PATH, settings); - - assert.deepStrictEqual(actual[0].stats, stats); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/src/providers/sync.ts b/node_modules/@nodelib/fs.scandir/src/providers/sync.ts deleted file mode 100644 index 5a15c0f7..00000000 --- a/node_modules/@nodelib/fs.scandir/src/providers/sync.ts +++ /dev/null @@ -1,62 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; - -import { IS_SUPPORT_READDIR_WITH_FILE_TYPES } from '../constants'; -import Settings from '../settings'; -import { Entry } from '../types'; -import * as utils from '../utils'; -import * as common from './common'; - -export function read(directory: string, settings: Settings): Entry[] { - if (!settings.stats && IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings); - } - - return readdir(directory, settings); -} - -export function readdirWithFileTypes(directory: string, settings: Settings): Entry[] { - const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); - - return dirents.map((dirent) => { - const entry: Entry = { - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, 
dirent.name, settings.pathSegmentSeparator) - }; - - if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { - try { - const stats = settings.fs.statSync(entry.path); - - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - } catch (error) { - if (settings.throwErrorOnBrokenSymbolicLink) { - throw error; - } - } - } - - return entry; - }); -} - -export function readdir(directory: string, settings: Settings): Entry[] { - const names = settings.fs.readdirSync(directory); - - return names.map((name) => { - const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - const stats = fsStat.statSync(entryPath, settings.fsStatSettings); - - const entry: Entry = { - name, - path: entryPath, - dirent: utils.fs.createDirentFromStats(name, stats) - }; - - if (settings.stats) { - entry.stats = stats; - } - - return entry; - }); -} diff --git a/node_modules/@nodelib/fs.scandir/src/settings.spec.ts b/node_modules/@nodelib/fs.scandir/src/settings.spec.ts deleted file mode 100644 index 5dd9894b..00000000 --- a/node_modules/@nodelib/fs.scandir/src/settings.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -import * as assert from 'assert'; -import * as path from 'path'; - -import { Stats } from '../../fs.macchiato'; - -import * as fs from './adapters/fs'; -import Settings from './settings'; - -describe('Settings', () => { - it('should return instance with default values', () => { - const settings = new Settings(); - - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter()); - assert.ok(!settings.followSymbolicLinks); - assert.ok(!settings.stats); - assert.strictEqual(settings.pathSegmentSeparator, path.sep); - assert.ok(settings.fsStatSettings); - assert.ok(settings.throwErrorOnBrokenSymbolicLink); - }); - - it('should return instance with custom values', () => { - const lstatSync = (): Stats => new Stats(); - - const settings = new Settings({ - fs: fs.createFileSystemAdapter({ lstatSync }), - stats: true - }); - - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter({ lstatSync })); - assert.ok(settings.stats); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/src/settings.ts b/node_modules/@nodelib/fs.scandir/src/settings.ts deleted file mode 100644 index 7e8dd3bb..00000000 --- a/node_modules/@nodelib/fs.scandir/src/settings.ts +++ /dev/null @@ -1,33 +0,0 @@ -import * as path from 'path'; - -import * as fsStat from '@nodelib/fs.stat'; - -import * as fs from './adapters/fs'; - -export type Options = { - followSymbolicLinks?: boolean; - fs?: Partial<fs.FileSystemAdapter>; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -}; - -export default class Settings { - public readonly followSymbolicLinks: boolean = this._getValue(this._options.followSymbolicLinks, false); - public readonly fs: fs.FileSystemAdapter = fs.createFileSystemAdapter(this._options.fs); - public readonly pathSegmentSeparator: string = this._getValue(this._options.pathSegmentSeparator, path.sep); - public readonly stats: boolean = this._getValue(this._options.stats, false); - public readonly throwErrorOnBrokenSymbolicLink: boolean = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - - public readonly fsStatSettings: fsStat.Settings = new fsStat.Settings({ - followSymbolicLink: this.followSymbolicLinks, - fs: this.fs, - throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink - }); - - constructor(private readonly _options: Options = {}) { } - - private _getValue<T>(option: T | undefined, value: T): T { - return
option ?? value; - } -} diff --git a/node_modules/@nodelib/fs.scandir/src/types/index.ts b/node_modules/@nodelib/fs.scandir/src/types/index.ts deleted file mode 100644 index 5a20ffb7..00000000 --- a/node_modules/@nodelib/fs.scandir/src/types/index.ts +++ /dev/null @@ -1,21 +0,0 @@ -import * as fs from 'fs'; - -export type Entry = { - dirent: Dirent; - name: string; - path: string; - stats?: Stats; -}; - -export type Stats = fs.Stats; - -export type Dirent = { - isBlockDevice(): boolean; - isCharacterDevice(): boolean; - isDirectory(): boolean; - isFIFO(): boolean; - isFile(): boolean; - isSocket(): boolean; - isSymbolicLink(): boolean; - name: string; -}; diff --git a/node_modules/@nodelib/fs.scandir/src/utils/fs.spec.ts b/node_modules/@nodelib/fs.scandir/src/utils/fs.spec.ts deleted file mode 100644 index a47760a4..00000000 --- a/node_modules/@nodelib/fs.scandir/src/utils/fs.spec.ts +++ /dev/null @@ -1,21 +0,0 @@ -import * as assert from 'assert'; - -import { Stats } from '../../../fs.macchiato'; -import * as util from './fs'; - -describe('Utils → FS', () => { - describe('.createDirentFromStats', () => { - it('should convert fs.Stats to fs.Dirent', () => { - const actual = util.createDirentFromStats('name', new Stats()); - - assert.strictEqual(actual.name, 'name'); - assert.ok(!actual.isBlockDevice()); - assert.ok(!actual.isCharacterDevice()); - assert.ok(!actual.isDirectory()); - assert.ok(!actual.isFIFO()); - assert.ok(actual.isFile()); - assert.ok(!actual.isSocket()); - assert.ok(!actual.isSymbolicLink()); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.scandir/src/utils/fs.ts b/node_modules/@nodelib/fs.scandir/src/utils/fs.ts deleted file mode 100644 index 645f4a36..00000000 --- a/node_modules/@nodelib/fs.scandir/src/utils/fs.ts +++ /dev/null @@ -1,27 +0,0 @@ -import * as fs from 'fs'; - -import { Dirent, Stats } from '../types'; - -class DirentFromStats implements fs.Dirent { - public isBlockDevice: Stats['isBlockDevice']; - public isCharacterDevice: Stats['isCharacterDevice']; - public isDirectory: Stats['isDirectory']; - public isFIFO: Stats['isFIFO']; - public isFile: Stats['isFile']; - public isSocket: Stats['isSocket']; - public isSymbolicLink: Stats['isSymbolicLink']; - - constructor(public name: string, stats: Stats) { - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} - -export function createDirentFromStats(name: string, stats: Stats): Dirent { - return new DirentFromStats(name, stats); -} diff --git a/node_modules/@nodelib/fs.scandir/src/utils/index.ts b/node_modules/@nodelib/fs.scandir/src/utils/index.ts deleted file mode 100644 index 6fa8f6de..00000000 --- a/node_modules/@nodelib/fs.scandir/src/utils/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import * as fs from './fs'; - -export { - fs -}; diff --git a/node_modules/@nodelib/fs.scandir/tsconfig.json b/node_modules/@nodelib/fs.scandir/tsconfig.json deleted file mode 100644 index 11e723f0..00000000 --- a/node_modules/@nodelib/fs.scandir/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "../../../tsconfig.json", - "compilerOptions": { - "rootDir": "src", - "outDir": "out" - }, - "references": [ - { - "path": "../fs.macchiato" - }, - { - "path": "../fs.stat" - } - ] -} diff --git 
a/node_modules/@nodelib/fs.scandir/tsconfig.tsbuildinfo b/node_modules/@nodelib/fs.scandir/tsconfig.tsbuildinfo deleted file mode 100644 index d6a07581..00000000 --- a/node_modules/@nodelib/fs.scandir/tsconfig.tsbuildinfo +++ /dev/null @@ -1,1720 +0,0 @@ -{ - "program": { - "fileInfos": { - "../../../node_modules/typescript/lib/lib.es5.d.ts": { - "version": "70ae6416528e68c2ee7b62892200d2ca631759943d4429f8b779b947ff1e124d", - "signature": "70ae6416528e68c2ee7b62892200d2ca631759943d4429f8b779b947ff1e124d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.d.ts": { - "version": "dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", - "signature": "dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2016.d.ts": { - "version": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", - "signature": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2017.d.ts": { - "version": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", - "signature": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2018.d.ts": { - "version": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", - "signature": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.dom.d.ts": { - "version": "9affb0a2ddc57df5b8174c0af96c288d697a262e5bc9ca1f544c999dc64a91e6", - "signature": "9affb0a2ddc57df5b8174c0af96c288d697a262e5bc9ca1f544c999dc64a91e6", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.dom.iterable.d.ts": { - "version": "fb0c09b697dc42afa84d1587e3c994a2f554d2a45635e4f0618768d16a86b69a", - "signature": "fb0c09b697dc42afa84d1587e3c994a2f554d2a45635e4f0618768d16a86b69a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.webworker.importscripts.d.ts": { - "version": "7fac8cb5fc820bc2a59ae11ef1c5b38d3832c6d0dfaec5acdb5569137d09a481", - "signature": "7fac8cb5fc820bc2a59ae11ef1c5b38d3832c6d0dfaec5acdb5569137d09a481", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.scripthost.d.ts": { - "version": "097a57355ded99c68e6df1b738990448e0bf170e606707df5a7c0481ff2427cd", - "signature": "097a57355ded99c68e6df1b738990448e0bf170e606707df5a7c0481ff2427cd", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.core.d.ts": { - "version": "63e0cc12d0f77394094bd19e84464f9840af0071e5b9358ced30511efef1d8d2", - "signature": "63e0cc12d0f77394094bd19e84464f9840af0071e5b9358ced30511efef1d8d2", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.collection.d.ts": { - "version": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", - "signature": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.generator.d.ts": { - "version": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", - "signature": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.iterable.d.ts": { - "version": 
"42f5e41e5893da663dbf0394268f54f1da4b43dc0ddd2ea4bf471fe5361d6faf", - "signature": "42f5e41e5893da663dbf0394268f54f1da4b43dc0ddd2ea4bf471fe5361d6faf", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.promise.d.ts": { - "version": "0b7a905675e6cb4211c128f0a3aa47d414b275180a299a9aad5d3ec298abbfc4", - "signature": "0b7a905675e6cb4211c128f0a3aa47d414b275180a299a9aad5d3ec298abbfc4", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.proxy.d.ts": { - "version": "dfff68b3c34338f6b307a25d4566de15eed7973b0dc5d69f9fde2bcac1c25315", - "signature": "dfff68b3c34338f6b307a25d4566de15eed7973b0dc5d69f9fde2bcac1c25315", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.reflect.d.ts": { - "version": "cb609802a8698aa28b9c56331d4b53f590ca3c1c3a255350304ae3d06017779d", - "signature": "cb609802a8698aa28b9c56331d4b53f590ca3c1c3a255350304ae3d06017779d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.symbol.d.ts": { - "version": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", - "signature": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts": { - "version": "4670208dd7da9d6c774ab1b75c1527a810388c7989c4905de6aaea8561cb9dce", - "signature": "4670208dd7da9d6c774ab1b75c1527a810388c7989c4905de6aaea8561cb9dce", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2016.array.include.d.ts": { - "version": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", - "signature": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.object.d.ts": { - "version": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", - "signature": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts": { - "version": "d0db416bccdb33975548baf09a42ee8c47eace1aac7907351a000f1e568e7232", - "signature": "d0db416bccdb33975548baf09a42ee8c47eace1aac7907351a000f1e568e7232", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.string.d.ts": { - "version": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", - "signature": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.intl.d.ts": { - "version": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", - "signature": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.typedarrays.d.ts": { - "version": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", - "signature": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts": { - "version": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", - "signature": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.asynciterable.d.ts": { - "version": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", 
- "signature": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.intl.d.ts": { - "version": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", - "signature": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.promise.d.ts": { - "version": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", - "signature": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.regexp.d.ts": { - "version": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", - "signature": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2020.bigint.d.ts": { - "version": "4f435f794b7853c55e2ae7cff6206025802aa79232d2867544178f2ca8ff5eaa", - "signature": "4f435f794b7853c55e2ae7cff6206025802aa79232d2867544178f2ca8ff5eaa", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.esnext.intl.d.ts": { - "version": "89bf2b7a601b73ea4311eda9c41f86a58994fec1bee3b87c4a14d68d9adcdcbd", - "signature": "89bf2b7a601b73ea4311eda9c41f86a58994fec1bee3b87c4a14d68d9adcdcbd", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.full.d.ts": { - "version": "d2f31f19e1ba6ed59be9259d660a239d9a3fcbbc8e038c6b2009bde34b175fed", - "signature": "d2f31f19e1ba6ed59be9259d660a239d9a3fcbbc8e038c6b2009bde34b175fed", - "affectsGlobalScope": false - }, - "./src/constants.ts": { - "version": "e987ef952c80874e93b84cb440b66b0f2ada121cd05e8dab4954bba9c594f0a9", - "signature": "30834002ccdbc21abcb61f35461de0d03d146ba450e7b60873cb57c1d79b59f3", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/globals.d.ts": { - "version": "74d61a149bea97a20b324410e4520796ffc36dcf35b54f03cfd0cfe922bb61cc", - "signature": "74d61a149bea97a20b324410e4520796ffc36dcf35b54f03cfd0cfe922bb61cc", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/async_hooks.d.ts": { - "version": "950e73fe3bcda768b5f593cec3f7137bb7cab709a82be89dd08c2a20568a28e2", - "signature": "950e73fe3bcda768b5f593cec3f7137bb7cab709a82be89dd08c2a20568a28e2", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/buffer.d.ts": { - "version": "61215c1a376bbe8f51cab4cc4ddbf3746387015113c37a84d981d4738c21b878", - "signature": "61215c1a376bbe8f51cab4cc4ddbf3746387015113c37a84d981d4738c21b878", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/child_process.d.ts": { - "version": "5eca801fb67009c5728b88793670f0137b5e31a8f7d1576d5110a1276feaba8c", - "signature": "5eca801fb67009c5728b88793670f0137b5e31a8f7d1576d5110a1276feaba8c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/cluster.d.ts": { - "version": "ce629710e5e58724902b753212e97861fd73e2aa09f5d88cb6d55dc763cf8c8a", - "signature": "ce629710e5e58724902b753212e97861fd73e2aa09f5d88cb6d55dc763cf8c8a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/console.d.ts": { - "version": "525c8fc510d9632d2a0a9de2d41c3ac1cdd79ff44d3b45c6d81cacabb683528d", - "signature": "525c8fc510d9632d2a0a9de2d41c3ac1cdd79ff44d3b45c6d81cacabb683528d", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/constants.d.ts": { - "version": 
"0279383034fae92db8097d0a41350293553599cc9c3c917b60e2542d0dfcbd44", - "signature": "0279383034fae92db8097d0a41350293553599cc9c3c917b60e2542d0dfcbd44", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/crypto.d.ts": { - "version": "9b9f8b151698fb1798f04b8375e240c764f094e730192e6a5353abdb1c709d6f", - "signature": "9b9f8b151698fb1798f04b8375e240c764f094e730192e6a5353abdb1c709d6f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/dgram.d.ts": { - "version": "37c4598a5f2025c97492e18bed8909ccd10bf26bb5f54d5f6009f9153291af91", - "signature": "37c4598a5f2025c97492e18bed8909ccd10bf26bb5f54d5f6009f9153291af91", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/dns.d.ts": { - "version": "ef226a42de7022eacdfa0f15aabf73b46c47af93044c8ebfab8aa8e3cf6c330c", - "signature": "ef226a42de7022eacdfa0f15aabf73b46c47af93044c8ebfab8aa8e3cf6c330c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/domain.d.ts": { - "version": "d5b7c8819ce1bd31a45f7675309e145ec28e3aa1b60a8e0637fd0e8916255baa", - "signature": "d5b7c8819ce1bd31a45f7675309e145ec28e3aa1b60a8e0637fd0e8916255baa", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/events.d.ts": { - "version": "76048f3c7325a6c1fa6306d40eb0c8570fa0209d09472d46f9b1221f66edae6f", - "signature": "76048f3c7325a6c1fa6306d40eb0c8570fa0209d09472d46f9b1221f66edae6f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/fs.d.ts": { - "version": "03be37150cc8fe48fd243169653f15149e0ed4a34eea0cae027b708d39eb01f8", - "signature": "03be37150cc8fe48fd243169653f15149e0ed4a34eea0cae027b708d39eb01f8", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/http.d.ts": { - "version": "f50ba0d2d8f891fa11326db36e6c25fe14bce747cf2bd9b554de3bb2a814f49c", - "signature": "f50ba0d2d8f891fa11326db36e6c25fe14bce747cf2bd9b554de3bb2a814f49c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/http2.d.ts": { - "version": "48b53111cc4ce136803fbf857cd8de2d5df33895b1af714a87caf87562182e46", - "signature": "48b53111cc4ce136803fbf857cd8de2d5df33895b1af714a87caf87562182e46", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/https.d.ts": { - "version": "dacbe08610729f6343ea9880ea8e737c6d7a6efa4a318d8f6acaf85db4aceed6", - "signature": "dacbe08610729f6343ea9880ea8e737c6d7a6efa4a318d8f6acaf85db4aceed6", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/inspector.d.ts": { - "version": "4218ced3933a31eed1278d350dd63c5900df0f0904f57d61c054d7a4b83dbe4c", - "signature": "4218ced3933a31eed1278d350dd63c5900df0f0904f57d61c054d7a4b83dbe4c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/module.d.ts": { - "version": "03394bf8deb8781b490ae9266a843fbdf00647947d79e25fcbf1d89a9e9c8a66", - "signature": "03394bf8deb8781b490ae9266a843fbdf00647947d79e25fcbf1d89a9e9c8a66", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/net.d.ts": { - "version": "358398fe4034395d85c87c319cca7a04001434b13dc68d067481ecb374385bfc", - "signature": "358398fe4034395d85c87c319cca7a04001434b13dc68d067481ecb374385bfc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/os.d.ts": { - "version": "d9bc6f1917c24d862a68d2633e4a32fd586bfe3e412e5d11fd07d8266b94ced5", - "signature": "d9bc6f1917c24d862a68d2633e4a32fd586bfe3e412e5d11fd07d8266b94ced5", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/path.d.ts": { - "version": 
"5fb30076f0e0e5744db8993648bfb67aadd895f439edad5cce039127a87a8a36", - "signature": "5fb30076f0e0e5744db8993648bfb67aadd895f439edad5cce039127a87a8a36", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/perf_hooks.d.ts": { - "version": "93a8a589862b5ac8fd8bb46426f7b081ba825a5171337dd45de9bf141624d55e", - "signature": "93a8a589862b5ac8fd8bb46426f7b081ba825a5171337dd45de9bf141624d55e", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/process.d.ts": { - "version": "0e0d58f5e90c0a270dac052b9c5ad8ccdfc8271118c2105b361063218d528d6e", - "signature": "0e0d58f5e90c0a270dac052b9c5ad8ccdfc8271118c2105b361063218d528d6e", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/punycode.d.ts": { - "version": "3f6a1fd73c9dc3bd7f4b79bc075297ca6527904df69b0f2c2c94e4c4c7d9a32c", - "signature": "3f6a1fd73c9dc3bd7f4b79bc075297ca6527904df69b0f2c2c94e4c4c7d9a32c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/querystring.d.ts": { - "version": "758948c06a0d02623c7d4ed357ffa79bdc170de6e004046678774a1bfa9a29bb", - "signature": "758948c06a0d02623c7d4ed357ffa79bdc170de6e004046678774a1bfa9a29bb", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/readline.d.ts": { - "version": "2ca26a43dec700c4b0bdc04b123094f4becffda70e3960f3e10b025f7a15ba8f", - "signature": "2ca26a43dec700c4b0bdc04b123094f4becffda70e3960f3e10b025f7a15ba8f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/repl.d.ts": { - "version": "27c3f3f672a6ce267f7cc34643231032016fa4b6d195c0725db570de0a7a9f91", - "signature": "27c3f3f672a6ce267f7cc34643231032016fa4b6d195c0725db570de0a7a9f91", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/stream.d.ts": { - "version": "9c581919a8c483f5080487ae8ec1dd398d94027aedf8e77436085e7fab23951a", - "signature": "9c581919a8c483f5080487ae8ec1dd398d94027aedf8e77436085e7fab23951a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/string_decoder.d.ts": { - "version": "7e62aac2cc9c0710d772047ad89e8d7117f52592c791eb995ce1f865fedab432", - "signature": "7e62aac2cc9c0710d772047ad89e8d7117f52592c791eb995ce1f865fedab432", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/timers.d.ts": { - "version": "b40652bf8ce4a18133b31349086523b219724dca8df3448c1a0742528e7ad5b9", - "signature": "b40652bf8ce4a18133b31349086523b219724dca8df3448c1a0742528e7ad5b9", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/tls.d.ts": { - "version": "48064f81a8354d04808e7b5bddf570aaf19f894cf1d8a2aa1f56c81decd33508", - "signature": "48064f81a8354d04808e7b5bddf570aaf19f894cf1d8a2aa1f56c81decd33508", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/trace_events.d.ts": { - "version": "a77fdb357c78b70142b2fdbbfb72958d69e8f765fd2a3c69946c1018e89d4638", - "signature": "a77fdb357c78b70142b2fdbbfb72958d69e8f765fd2a3c69946c1018e89d4638", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/tty.d.ts": { - "version": "3c2ac350c3baa61fd2b1925844109e098f4376d0768a4643abc82754fd752748", - "signature": "3c2ac350c3baa61fd2b1925844109e098f4376d0768a4643abc82754fd752748", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/url.d.ts": { - "version": "834545a7726e414890371aec1a89b7915963e08e790e093259e8bed429ef15c6", - "signature": "834545a7726e414890371aec1a89b7915963e08e790e093259e8bed429ef15c6", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/util.d.ts": { - "version": 
"b248fb69886bce18cf6650491f43f0326ed6d59c8fdf7fd63dbd35bf4ef3e2bc", - "signature": "b248fb69886bce18cf6650491f43f0326ed6d59c8fdf7fd63dbd35bf4ef3e2bc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/v8.d.ts": { - "version": "4407bd5f1d6f748590ba125195eb1d7a003c2de2f3b057456d3ac76a742d2561", - "signature": "4407bd5f1d6f748590ba125195eb1d7a003c2de2f3b057456d3ac76a742d2561", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/vm.d.ts": { - "version": "2b57b7d7191c6e2efc2ed4f87cf1e25c383278ac5d019670406508df42dc34f3", - "signature": "2b57b7d7191c6e2efc2ed4f87cf1e25c383278ac5d019670406508df42dc34f3", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/worker_threads.d.ts": { - "version": "46f0413ecc0d83b047d46dbe03a37c7c760f59f0bb9a8633150e2d9335870675", - "signature": "46f0413ecc0d83b047d46dbe03a37c7c760f59f0bb9a8633150e2d9335870675", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/zlib.d.ts": { - "version": "2ea98f43cfae8dfbefc45d8bd1ec4907bbad33d18203ea8ef8b50d36b97afa35", - "signature": "2ea98f43cfae8dfbefc45d8bd1ec4907bbad33d18203ea8ef8b50d36b97afa35", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/ts3.3/base.d.ts": { - "version": "067b1964df87a4fc98ebffbd2bada6d7ed14a5b032f9071ea39478d82e701a99", - "signature": "067b1964df87a4fc98ebffbd2bada6d7ed14a5b032f9071ea39478d82e701a99", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/globals.global.d.ts": { - "version": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", - "signature": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/wasi.d.ts": { - "version": "14a6a3cee450438254c004a6b4f1191ec9977186bdeda07764f2a8d90ef71117", - "signature": "14a6a3cee450438254c004a6b4f1191ec9977186bdeda07764f2a8d90ef71117", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/ts3.6/base.d.ts": { - "version": "d170ea32762c00c660740f2cc0ca9526290ab9d9fb9c72282c1fa53cd1a7728e", - "signature": "d170ea32762c00c660740f2cc0ca9526290ab9d9fb9c72282c1fa53cd1a7728e", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/assert.d.ts": { - "version": "54b2276780dc8d538a71b954b87ea081a1e9f90e7f1195f2daf2bddde0bf52df", - "signature": "54b2276780dc8d538a71b954b87ea081a1e9f90e7f1195f2daf2bddde0bf52df", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/base.d.ts": { - "version": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", - "signature": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/index.d.ts": { - "version": "6fa68653382bd571bc63831e9f9c1307cc52f7310c1470463fe429d84147667d", - "signature": "6fa68653382bd571bc63831e9f9c1307cc52f7310c1470463fe429d84147667d", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/minimatch/index.d.ts": { - "version": "1d1e6bd176eee5970968423d7e215bfd66828b6db8d54d17afec05a831322633", - "signature": "1d1e6bd176eee5970968423d7e215bfd66828b6db8d54d17afec05a831322633", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/glob/index.d.ts": { - "version": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", - "signature": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/rimraf/index.d.ts": { - "version": 
"6462324ef579c47415610a63f1aa8b72f5b5114f8fe8307967f9add2bca634f5", - "signature": "6462324ef579c47415610a63f1aa8b72f5b5114f8fe8307967f9add2bca634f5", - "affectsGlobalScope": false - }, - "./src/adapters/fs.ts": { - "version": "97a8bee3a5b65983e9afb5b7efa4a50977d75b140e230fd2323c81f827770b39", - "signature": "6cee571d9f8ea88d197d1614e5cf328391e96dff69ca61ab5299a630d0c65822", - "affectsGlobalScope": false - }, - "../fs.stat/out/adapters/fs.d.ts": { - "version": "ceebf93146ac7b3f85276a2501de57c5cf5bb19742944c958bd831f995b41409", - "signature": "ceebf93146ac7b3f85276a2501de57c5cf5bb19742944c958bd831f995b41409", - "affectsGlobalScope": false - }, - "../fs.stat/out/settings.d.ts": { - "version": "30c47bd1f03a220a10e8c11708a2c73c04135999ca1a35271605f9683d36b432", - "signature": "30c47bd1f03a220a10e8c11708a2c73c04135999ca1a35271605f9683d36b432", - "affectsGlobalScope": false - }, - "../fs.stat/out/types/index.d.ts": { - "version": "8b9fa6dfb2bec7abe9937fe049505d896550b2ad600cb7114b6fe2813b5cf180", - "signature": "8b9fa6dfb2bec7abe9937fe049505d896550b2ad600cb7114b6fe2813b5cf180", - "affectsGlobalScope": false - }, - "../fs.stat/out/providers/async.d.ts": { - "version": "c692034610ac35559227657172f6f76581ee7b16c319c7d5973e19b650f11b9f", - "signature": "c692034610ac35559227657172f6f76581ee7b16c319c7d5973e19b650f11b9f", - "affectsGlobalScope": false - }, - "../fs.stat/out/index.d.ts": { - "version": "079488cc4bf1eef64297994ef8719c078a86380610beea1d1a920c9436997967", - "signature": "079488cc4bf1eef64297994ef8719c078a86380610beea1d1a920c9436997967", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/run-parallel/index.d.ts": { - "version": "eefea34ce2cdb15ab6678c8c7911c27b2c3da267d7922f192f3d2eb0bf621821", - "signature": "eefea34ce2cdb15ab6678c8c7911c27b2c3da267d7922f192f3d2eb0bf621821", - "affectsGlobalScope": false - }, - "./src/settings.ts": { - "version": "1e4df6b7ebf56063351b92afd854f2222b147c1cee37648dffc078441296d113", - "signature": "354b18c096926d3f35b597b6309d9792b6517840bbf972c095930d067e28de83", - "affectsGlobalScope": false - }, - "./src/types/index.ts": { - "version": "25cf5ed32a6edbc77c16dcbc59e2f8e447fe03e579a32d3aa3907ae57919977c", - "signature": "a86bb35bb7b946058e544aba4a7ac5359648d58904184701f1ca23b26b36ccd3", - "affectsGlobalScope": false - }, - "./src/utils/fs.ts": { - "version": "afd8d3bda70fc3602b633549c3647b4c5945274544690af57e058df7d3c7adc3", - "signature": "50d5aeb36e4c9ff996c8ecfe815588dda34ecc3d4773e8fd3507dc561a99b950", - "affectsGlobalScope": false - }, - "./src/utils/index.ts": { - "version": "1d6f2f71f6193511012745dec6b30646628c6d9275477a2eab9380b5133382bb", - "signature": "8990e1bf5a5a1ad74390339168ec2cc6de0c42a2edfc75d6dda3d9a05c70cf35", - "affectsGlobalScope": false - }, - "./src/providers/common.ts": { - "version": "74467f43113a719a495e44d5407e24094dccd039716bbf526842773a88608e0f", - "signature": "0483edfd584ef4cf3748ffebc395a3b93dae732cd6027fe1ab567c86039654ab", - "affectsGlobalScope": false - }, - "./src/providers/async.ts": { - "version": "87031909864afb217428dba1b92646be99009fc5b3de773d871b46469b0a4d04", - "signature": "fa316e5f457f8a5ef600f305a60cb0031f0cf07c7c6991b520d353739f56c6ad", - "affectsGlobalScope": false - }, - "./src/providers/sync.ts": { - "version": "46df98193e3b2de437807dcd354bddfb9d5d546b5791bfdfa1f99958ed1a1e91", - "signature": "e02ec7509984e7c388616c938f13b862166975fc6442dce1b6b3a5e630d43e19", - "affectsGlobalScope": false - }, - "./src/index.ts": { - "version": "1eb0c18106c45518281f6cae285f45345473184603e05a766f6e416fecba36b7", 
- "signature": "105b0c9e3b159a1f32ca33bf1cb5ead919ea15bab7143bd99b298ceb687b0e84", - "affectsGlobalScope": false - }, - "./src/index.spec.ts": { - "version": "2063af9ddce3744921cebb65bd70032ec84a6c2d3e359bc6a8fe5ba01484e0e7", - "signature": "a900cdf2c35bba00b0363cc950bbf88b887976e70a9eae929dad35ef964109d9", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/types.d.ts": { - "version": "47b605d1e61f92f418c7879051e5458f8ec00aeacac419a37754066fec42b9ba", - "signature": "47b605d1e61f92f418c7879051e5458f8ec00aeacac419a37754066fec42b9ba", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/dirent.d.ts": { - "version": "98387ef539ccca1023a5934f6ea2dd87ee8a6c87db31ec7986b9da016c66fc16", - "signature": "98387ef539ccca1023a5934f6ea2dd87ee8a6c87db31ec7986b9da016c66fc16", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/stats.d.ts": { - "version": "7c70ba0c69002f78ddac880f0096de5b0e78248cf680c1fe1a89439dbf069c5d", - "signature": "7c70ba0c69002f78ddac880f0096de5b0e78248cf680c1fe1a89439dbf069c5d", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/index.d.ts": { - "version": "e00937f585b9c2f95d9d4e00b4e76427eb9516c70a4470d805451ba2ea00044e", - "signature": "e00937f585b9c2f95d9d4e00b4e76427eb9516c70a4470d805451ba2ea00044e", - "affectsGlobalScope": false - }, - "./src/settings.spec.ts": { - "version": "7da4e11466b26fe96f3e4e3e46fc412870dd2dead2f1242897e088c526dd3625", - "signature": "714d2bb322e0442caf181768f049abd17a96d328d87169c2e6c13a86839c4463", - "affectsGlobalScope": false - }, - "./src/adapters/fs.spec.ts": { - "version": "c2643809431e7e7efde3d5788a889e0368005436bdb7f35925ef4e8ab758d99e", - "signature": "bd7314ded2b0851e1bb0834dc068cb4d18cdbecc9e965e8a0f4952ac3ee4610c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/sinon/ts3.1/index.d.ts": { - "version": "168435ab3390620aebf1aa0001b380983582d0849755eeb17f2c501d1fc57587", - "signature": "168435ab3390620aebf1aa0001b380983582d0849755eeb17f2c501d1fc57587", - "affectsGlobalScope": false - }, - "./src/providers/async.spec.ts": { - "version": "9f62b389e4d6f61e02f0a85f3622139c16d9ecdf9653c2c5db127908b338f531", - "signature": "6ff501c2b9280fbf7322044c48dff6eea6849df3b6ab6844facd9d789988a2c9", - "affectsGlobalScope": false - }, - "./src/providers/common.spec.ts": { - "version": "fc7c547c8896e8f5ba678866c1db946b0e142b2db65c7312dcca8a8e77a9b87b", - "signature": "d030e48e296ebe726e80c9b6e26988c7d9a5c1dba321a5f03d7ce297f130aeae", - "affectsGlobalScope": false - }, - "./src/providers/sync.spec.ts": { - "version": "11e77e69be2f8fd5f2de8a2262aef24bc17d90719007a784822514226fc30878", - "signature": "be22d8b5a836edfac7c9c5ef03e98058ec89f0b98edef8e54ea410187b0bda28", - "affectsGlobalScope": false - }, - "./src/utils/fs.spec.ts": { - "version": "33d768b4ce72260264526fb3106e7892d594217c8d3bb190728594a49442fcde", - "signature": "bd7314ded2b0851e1bb0834dc068cb4d18cdbecc9e965e8a0f4952ac3ee4610c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/eslint-visitor-keys/index.d.ts": { - "version": "725d9be2fd48440256f4deb00649adffdbc5ecd282b09e89d4e200663792c34c", - "signature": "725d9be2fd48440256f4deb00649adffdbc5ecd282b09e89d4e200663792c34c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/fs-extra/index.d.ts": { - "version": "aca36e2d27783f4bad7fc1786a532ff76024f0fc8575df48bcd9a5eb452fe7e7", - "signature": "aca36e2d27783f4bad7fc1786a532ff76024f0fc8575df48bcd9a5eb452fe7e7", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/highlight.js/index.d.ts": { - "version": 
"21a2fa3722dc0baba2649e040c3121eb38ce84f5afe35ff1c20276132eaa2f2c", - "signature": "21a2fa3722dc0baba2649e040c3121eb38ce84f5afe35ff1c20276132eaa2f2c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/json-schema/index.d.ts": { - "version": "b2be568d8ce95fcb26eebd04c035d94825655fdf689bf67d799f5ff8cbbb1024", - "signature": "b2be568d8ce95fcb26eebd04c035d94825655fdf689bf67d799f5ff8cbbb1024", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/common.d.ts": { - "version": "3594c022901a1c8993b0f78a3f534cfb81e7b619ed215348f7f6882f3db02abc", - "signature": "3594c022901a1c8993b0f78a3f534cfb81e7b619ed215348f7f6882f3db02abc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/array.d.ts": { - "version": "d03a1ae3d39f757c9f22e4e775b940a98d86bb50ec85529b59e32a17b65c2b90", - "signature": "d03a1ae3d39f757c9f22e4e775b940a98d86bb50ec85529b59e32a17b65c2b90", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/collection.d.ts": { - "version": "0c75b204aed9cf6ff1c7b4bed87a3ece0d9d6fc857a6350c0c95ed0c38c814e8", - "signature": "0c75b204aed9cf6ff1c7b4bed87a3ece0d9d6fc857a6350c0c95ed0c38c814e8", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/date.d.ts": { - "version": "187119ff4f9553676a884e296089e131e8cc01691c546273b1d0089c3533ce42", - "signature": "187119ff4f9553676a884e296089e131e8cc01691c546273b1d0089c3533ce42", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/function.d.ts": { - "version": "c9f396e71966bd3a890d8a36a6a497dbf260e9b868158ea7824d4b5421210afe", - "signature": "c9f396e71966bd3a890d8a36a6a497dbf260e9b868158ea7824d4b5421210afe", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/lang.d.ts": { - "version": "509235563ea2b939e1bbe92aae17e71e6a82ceab8f568b45fb4fce7d72523a32", - "signature": "509235563ea2b939e1bbe92aae17e71e6a82ceab8f568b45fb4fce7d72523a32", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/math.d.ts": { - "version": "9364c7566b0be2f7b70ff5285eb34686f83ccb01bda529b82d23b2a844653bfb", - "signature": "9364c7566b0be2f7b70ff5285eb34686f83ccb01bda529b82d23b2a844653bfb", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/number.d.ts": { - "version": "00baffbe8a2f2e4875367479489b5d43b5fc1429ecb4a4cc98cfc3009095f52a", - "signature": "00baffbe8a2f2e4875367479489b5d43b5fc1429ecb4a4cc98cfc3009095f52a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/object.d.ts": { - "version": "c311349ec71bb69399ffc4092853e7d8a86c1ca39ddb4cd129e775c19d985793", - "signature": "c311349ec71bb69399ffc4092853e7d8a86c1ca39ddb4cd129e775c19d985793", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/seq.d.ts": { - "version": "3c92b6dfd43cc1c2485d9eba5ff0b74a19bb8725b692773ef1d66dac48cda4bd", - "signature": "3c92b6dfd43cc1c2485d9eba5ff0b74a19bb8725b692773ef1d66dac48cda4bd", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/string.d.ts": { - "version": "4908e4c00832b26ce77a629de8501b0e23a903c094f9e79a7fec313a15da796a", - "signature": "4908e4c00832b26ce77a629de8501b0e23a903c094f9e79a7fec313a15da796a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/util.d.ts": { - "version": "2630a7cbb597e85d713b7ef47f2946d4280d3d4c02733282770741d40672b1a5", - "signature": "2630a7cbb597e85d713b7ef47f2946d4280d3d4c02733282770741d40672b1a5", - 
"affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/index.d.ts": { - "version": "0714e2046df66c0e93c3330d30dbc0565b3e8cd3ee302cf99e4ede6220e5fec8", - "signature": "0714e2046df66c0e93c3330d30dbc0565b3e8cd3ee302cf99e4ede6220e5fec8", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/marked/index.d.ts": { - "version": "c08a5e873738f5576ae1ca5810b5ebc30509f05bde56c3a3bbdd75d6c0806e6a", - "signature": "c08a5e873738f5576ae1ca5810b5ebc30509f05bde56c3a3bbdd75d6c0806e6a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/minimist/index.d.ts": { - "version": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", - "signature": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/mocha/index.d.ts": { - "version": "c4c03cf65951d980ba618ae9601d10438730803fc9c8a1f7b34af8739981e205", - "signature": "c4c03cf65951d980ba618ae9601d10438730803fc9c8a1f7b34af8739981e205", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/normalize-package-data/index.d.ts": { - "version": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", - "signature": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/shelljs/index.d.ts": { - "version": "b73abc91e3166b1951d302f8008c17e62d32e570e71b2680141f7c3f5d0a990d", - "signature": "b73abc91e3166b1951d302f8008c17e62d32e570e71b2680141f7c3f5d0a990d", - "affectsGlobalScope": false - } - }, - "options": { - "target": 4, - "module": 1, - "moduleResolution": 2, - "strict": true, - "alwaysStrict": true, - "strictFunctionTypes": true, - "strictNullChecks": true, - "strictPropertyInitialization": true, - "forceConsistentCasingInFileNames": true, - "noImplicitAny": true, - "noImplicitReturns": true, - "noImplicitThis": true, - "noFallthroughCasesInSwitch": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "emitDecoratorMetadata": true, - "experimentalDecorators": true, - "downlevelIteration": true, - "composite": true, - "declaration": true, - "declarationMap": true, - "pretty": true, - "rootDir": "./src", - "outDir": "./out", - "configFilePath": "./tsconfig.json" - }, - "referencedMap": { - "../../../node_modules/@types/fs-extra/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/glob/index.d.ts": [ - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/array.d.ts": [ - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/collection.d.ts": [ - 
"../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/common.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/date.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/function.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/lang.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - 
"../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/math.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/number.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/object.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/seq.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/string.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - 
"../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/util.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/index.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts" - ], - "../../../node_modules/@types/node/base.d.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts" - ], - "../../../node_modules/@types/node/child_process.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/cluster.d.ts": [ - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/crypto.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/dgram.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/domain.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/fs.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", 
- "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http2.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/https.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/index.d.ts": [ - "../../../node_modules/@types/node/base.d.ts" - ], - "../../../node_modules/@types/node/inspector.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/net.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/perf_hooks.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts" - ], - "../../../node_modules/@types/node/process.d.ts": [ - "../../../node_modules/@types/node/tty.d.ts" - ], - "../../../node_modules/@types/node/readline.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/repl.d.ts": [ - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/stream.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/tls.d.ts": [ - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/ts3.3/base.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - 
"../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts" - ], - "../../../node_modules/@types/node/ts3.6/base.d.ts": [ - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/wasi.d.ts" - ], - "../../../node_modules/@types/node/tty.d.ts": [ - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/url.d.ts": [ - "../../../node_modules/@types/node/querystring.d.ts" - ], - "../../../node_modules/@types/node/v8.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/worker_threads.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/zlib.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/rimraf/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/shelljs/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.macchiato/out/dirent.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.macchiato/out/index.d.ts": [ - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/stats.d.ts" - ], - "../fs.macchiato/out/stats.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "./src/adapters/fs.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/adapters/fs.ts" - ], - "./src/adapters/fs.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ], - "./src/index.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/rimraf/index.d.ts", - "./src/index.ts" - ], - "./src/index.ts": [ - "./src/adapters/fs.ts", - "./src/providers/async.ts", - "./src/providers/sync.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/async.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/constants.ts", - "./src/providers/async.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/async.ts": [ - 
"../../../node_modules/@types/run-parallel/index.d.ts", - "../fs.stat/out/index.d.ts", - "./src/constants.ts", - "./src/providers/common.ts", - "./src/settings.ts", - "./src/types/index.ts", - "./src/utils/index.ts" - ], - "./src/providers/common.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "./src/providers/common.ts" - ], - "./src/providers/sync.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/constants.ts", - "./src/providers/sync.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/sync.ts": [ - "../fs.stat/out/index.d.ts", - "./src/constants.ts", - "./src/providers/common.ts", - "./src/settings.ts", - "./src/types/index.ts", - "./src/utils/index.ts" - ], - "./src/settings.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/adapters/fs.ts", - "./src/settings.ts" - ], - "./src/settings.ts": [ - "../../../node_modules/@types/node/path.d.ts", - "../fs.stat/out/index.d.ts", - "./src/adapters/fs.ts" - ], - "./src/types/index.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ], - "./src/utils/fs.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/utils/fs.ts" - ], - "./src/utils/fs.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "./src/types/index.ts" - ], - "./src/utils/index.ts": [ - "./src/utils/fs.ts" - ], - "../fs.stat/out/adapters/fs.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.stat/out/index.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts", - "../fs.stat/out/providers/async.d.ts", - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ], - "../fs.stat/out/providers/async.d.ts": [ - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ], - "../fs.stat/out/settings.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts" - ], - "../fs.stat/out/types/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ] - }, - "exportedModulesMap": { - "../../../node_modules/@types/fs-extra/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/glob/index.d.ts": [ - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/array.d.ts": [ - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/collection.d.ts": [ 
- "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/common.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/date.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/function.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/lang.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - 
"../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/math.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/number.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/object.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/seq.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/string.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - 
"../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/util.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/index.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts" - ], - "../../../node_modules/@types/node/base.d.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts" - ], - "../../../node_modules/@types/node/child_process.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/cluster.d.ts": [ - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/crypto.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/dgram.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/domain.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/fs.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", 
- "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http2.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/https.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/index.d.ts": [ - "../../../node_modules/@types/node/base.d.ts" - ], - "../../../node_modules/@types/node/inspector.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/net.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/perf_hooks.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts" - ], - "../../../node_modules/@types/node/process.d.ts": [ - "../../../node_modules/@types/node/tty.d.ts" - ], - "../../../node_modules/@types/node/readline.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/repl.d.ts": [ - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/stream.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/tls.d.ts": [ - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/ts3.3/base.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - 
"../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts" - ], - "../../../node_modules/@types/node/ts3.6/base.d.ts": [ - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/wasi.d.ts" - ], - "../../../node_modules/@types/node/tty.d.ts": [ - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/url.d.ts": [ - "../../../node_modules/@types/node/querystring.d.ts" - ], - "../../../node_modules/@types/node/v8.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/worker_threads.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/zlib.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/rimraf/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/shelljs/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.macchiato/out/dirent.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.macchiato/out/index.d.ts": [ - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/stats.d.ts" - ], - "../fs.macchiato/out/stats.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "./src/adapters/fs.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ], - "./src/index.ts": [ - "./src/adapters/fs.ts", - "./src/providers/async.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/async.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/sync.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/settings.ts": [ - "../fs.stat/out/index.d.ts", - "./src/adapters/fs.ts" - ], - "./src/types/index.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ], - "./src/utils/fs.ts": [ - "./src/types/index.ts" - ], - "./src/utils/index.ts": [ - "./src/utils/fs.ts" - ], - "../fs.stat/out/adapters/fs.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.stat/out/index.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts", - "../fs.stat/out/providers/async.d.ts", - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ], - "../fs.stat/out/providers/async.d.ts": [ - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - 
], - "../fs.stat/out/settings.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts" - ], - "../fs.stat/out/types/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ] - }, - "semanticDiagnosticsPerFile": [ - "../../../node_modules/@types/eslint-visitor-keys/index.d.ts", - "../../../node_modules/@types/fs-extra/index.d.ts", - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/highlight.js/index.d.ts", - "../../../node_modules/@types/json-schema/index.d.ts", - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts", - "../../../node_modules/@types/marked/index.d.ts", - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/minimist/index.d.ts", - "../../../node_modules/@types/mocha/index.d.ts", - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/base.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - 
"../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/wasi.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts", - "../../../node_modules/@types/normalize-package-data/index.d.ts", - "../../../node_modules/@types/rimraf/index.d.ts", - "../../../node_modules/@types/run-parallel/index.d.ts", - "../../../node_modules/@types/shelljs/index.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../../../node_modules/typescript/lib/lib.dom.d.ts", - "../../../node_modules/typescript/lib/lib.dom.iterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.collection.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.core.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.generator.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.iterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.promise.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.proxy.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.reflect.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.symbol.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts", - "../../../node_modules/typescript/lib/lib.es2016.array.include.d.ts", - "../../../node_modules/typescript/lib/lib.es2016.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.full.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.intl.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.object.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.string.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.typedarrays.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.asynciterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.intl.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.promise.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.regexp.d.ts", - "../../../node_modules/typescript/lib/lib.es2020.bigint.d.ts", - "../../../node_modules/typescript/lib/lib.es5.d.ts", - "../../../node_modules/typescript/lib/lib.esnext.intl.d.ts", - "../../../node_modules/typescript/lib/lib.scripthost.d.ts", - "../../../node_modules/typescript/lib/lib.webworker.importscripts.d.ts", - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/index.d.ts", - "../fs.macchiato/out/stats.d.ts", - "../fs.macchiato/out/types.d.ts", - "./src/adapters/fs.spec.ts", - "./src/adapters/fs.ts", - "./src/constants.ts", - "./src/index.spec.ts", - "./src/index.ts", - "./src/providers/async.spec.ts", - "./src/providers/async.ts", - "./src/providers/common.spec.ts", - "./src/providers/common.ts", - "./src/providers/sync.spec.ts", - "./src/providers/sync.ts", - "./src/settings.spec.ts", - "./src/settings.ts", - "./src/types/index.ts", - "./src/utils/fs.spec.ts", - "./src/utils/fs.ts", - "./src/utils/index.ts", - "../fs.stat/out/adapters/fs.d.ts", - "../fs.stat/out/index.d.ts", - "../fs.stat/out/providers/async.d.ts", - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ] - }, - "version": "3.9.7" -} \ No 
newline at end of file diff --git a/node_modules/@nodelib/fs.stat/.eslintcache b/node_modules/@nodelib/fs.stat/.eslintcache deleted file mode 100644 index a547acb2..00000000 --- a/node_modules/@nodelib/fs.stat/.eslintcache +++ /dev/null @@ -1 +0,0 @@ -[{"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\adapters\\fs.spec.ts":"1","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\adapters\\fs.ts":"2","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\index.spec.ts":"3","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\index.ts":"4","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\providers\\async.spec.ts":"5","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\providers\\async.ts":"6","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\providers\\sync.spec.ts":"7","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\providers\\sync.ts":"8","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\settings.spec.ts":"9","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\settings.ts":"10","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\types\\index.ts":"11"},{"size":807,"mtime":1609075886217,"results":"12","hashOfConfig":"13"},{"size":544,"mtime":1609075886217,"results":"14","hashOfConfig":"13"},{"size":1664,"mtime":1609075886218,"results":"15","hashOfConfig":"13"},{"size":1576,"mtime":1609075886218,"results":"16","hashOfConfig":"13"},{"size":3262,"mtime":1609075886219,"results":"17","hashOfConfig":"13"},{"size":1258,"mtime":1609075886220,"results":"18","hashOfConfig":"13"},{"size":2676,"mtime":1609075886220,"results":"19","hashOfConfig":"13"},{"size":518,"mtime":1609075886220,"results":"20","hashOfConfig":"13"},{"size":943,"mtime":1609075886221,"results":"21","hashOfConfig":"13"},{"size":809,"mtime":1609075886221,"results":"22","hashOfConfig":"13"},{"size":109,"mtime":1609075886222,"results":"23","hashOfConfig":"13"},{"filePath":"24","messages":"25","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},"18hbtvp",{"filePath":"26","messages":"27","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"28","messages":"29","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"30","messages":"31","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"32","messages":"33","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"34","messages":"35","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"36","messages":"37","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"38","messages":"39","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"40","messages":"41","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"42","messages":"43","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"44","messages":"45","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\adapters\\fs.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\adapters\\fs.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\index.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\index.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\sr
c\\providers\\async.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\providers\\async.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\providers\\sync.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\providers\\sync.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\settings.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\settings.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.stat\\src\\types\\index.ts",[]] \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts index d17b356b..3af759c9 100644 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts @@ -1,11 +1,13 @@ -/// -import * as fs from 'fs'; -export declare type FileSystemAdapter = { - lstat: typeof fs.lstat; - stat: typeof fs.stat; - lstatSync: typeof fs.lstatSync; - statSync: typeof fs.statSync; -}; -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; -//# sourceMappingURL=fs.d.ts.map \ No newline at end of file +/// +import * as fs from 'fs'; +import type { ErrnoException } from '../types'; +export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; +export declare type StatSynchronousMethod = (path: string) => fs.Stats; +export interface FileSystemAdapter { + lstat: StatAsynchronousMethod; + stat: StatAsynchronousMethod; + lstatSync: StatSynchronousMethod; + statSync: StatSynchronousMethod; +} +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts.map b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts.map deleted file mode 100644 index d84a447a..00000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/adapters/fs.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,oBAAY,iBAAiB,GAAG;IAC/B,KAAK,EAAE,OAAO,EAAE,CAAC,KAAK,CAAC;IACvB,IAAI,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC;IACrB,SAAS,EAAE,OAAO,EAAE,CAAC,SAAS,CAAC;IAC/B,QAAQ,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC;CAC7B,CAAC;AAEF,eAAO,MAAM,mBAAmB,EAAE,iBAKjC,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,OAAO,CAAC,iBAAiB,CAAC,GAAG,iBAAiB,CASjG"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/@nodelib/fs.stat/out/adapters/fs.js index c000ee9b..8dc08c8c 100644 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.js +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.js @@ -1,17 +1,17 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts b/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts deleted file mode 100644 index 2858c4bc..00000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=fs.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts.map b/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts.map deleted file mode 100644 index 8e096bbc..00000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"fs.spec.d.ts","sourceRoot":"","sources":["../../src/adapters/fs.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.js b/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.js deleted file mode 100644 index 1b9031e7..00000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.spec.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -const adapter = require("./fs"); -describe('Adapters → FileSystem', () => { - it('should return original FS methods', () => { - const expected = adapter.FILE_SYSTEM_ADAPTER; - const actual = adapter.createFileSystemAdapter(); - assert.deepStrictEqual(actual, expected); - }); - it('should return custom FS methods', () => { - const customLstatSyncMethod = () => new fs_macchiato_1.Stats(); - const expected = Object.assign(Object.assign({}, adapter.FILE_SYSTEM_ADAPTER), { lstatSync: customLstatSyncMethod }); - const actual = adapter.createFileSystemAdapter({ - lstatSync: customLstatSyncMethod - }); - assert.deepStrictEqual(actual, expected); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/@nodelib/fs.stat/out/index.d.ts index 5f092f9c..f95db995 100644 --- a/node_modules/@nodelib/fs.stat/out/index.d.ts +++ b/node_modules/@nodelib/fs.stat/out/index.d.ts @@ -1,13 +1,12 @@ -import { FileSystemAdapter } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import { Stats } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function stat(path: string, callback: AsyncCallback): void; -declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace stat { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; -export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, Options, Stats }; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, 
{ Options } from './settings'; +import type { Stats } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function stat(path: string, callback: AsyncCallback): void; +declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace stat { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; +export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/@nodelib/fs.stat/out/index.d.ts.map b/node_modules/@nodelib/fs.stat/out/index.d.ts.map deleted file mode 100644 index e07ff1ac..00000000 --- a/node_modules/@nodelib/fs.stat/out/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAClD,OAAO,KAAK,KAAK,MAAM,mBAAmB,CAAC;AAE3C,OAAO,QAAQ,EAAE,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAC/C,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAEhC,aAAK,aAAa,GAAG,KAAK,CAAC,aAAa,CAAC;AAEzC,iBAAS,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAAC;AAC3D,iBAAS,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,iBAAiB,EAAE,OAAO,GAAG,QAAQ,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAAC;AAWlG,OAAO,WAAW,IAAI,CAAC;IACtB,SAAS,aAAa,CAAC,IAAI,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;CAC7F;AAED,iBAAS,QAAQ,CAAC,IAAI,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,QAAQ,GAAG,KAAK,CAI7E;AAUD,OAAO,EACN,QAAQ,EACR,IAAI,EACJ,QAAQ,EAIR,aAAa,EACb,iBAAiB,EACjB,OAAO,EACP,KAAK,EACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/@nodelib/fs.stat/out/index.js index 9808b5a9..b23f7510 100644 --- a/node_modules/@nodelib/fs.stat/out/index.js +++ b/node_modules/@nodelib/fs.stat/out/index.js @@ -1,25 +1,26 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.statSync = exports.stat = exports.Settings = void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function stat(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} -exports.stat = stat; -function statSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.statSync = statSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.statSync = exports.stat = exports.Settings = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function stat(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.stat = stat; +function 
statSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.statSync = statSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.stat/out/index.spec.d.ts b/node_modules/@nodelib/fs.stat/out/index.spec.d.ts deleted file mode 100644 index 4e9d2bbe..00000000 --- a/node_modules/@nodelib/fs.stat/out/index.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=index.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/index.spec.d.ts.map b/node_modules/@nodelib/fs.stat/out/index.spec.d.ts.map deleted file mode 100644 index 47bd6666..00000000 --- a/node_modules/@nodelib/fs.stat/out/index.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.spec.d.ts","sourceRoot":"","sources":["../src/index.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/index.spec.js b/node_modules/@nodelib/fs.stat/out/index.spec.js deleted file mode 100644 index 941ec3ee..00000000 --- a/node_modules/@nodelib/fs.stat/out/index.spec.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fs = require("fs"); -const rimraf = require("rimraf"); -const _1 = require("."); -describe('Package', () => { - before(() => { - rimraf.sync('fixtures'); - fs.mkdirSync('fixtures'); - fs.mkdirSync('fixtures/a'); - fs.symlinkSync('a', 'fixtures/b', 'junction'); - }); - after(() => { - rimraf.sync('fixtures'); - }); - describe('.stat', () => { - it('should work without options or settings', (done) => { - _1.stat('fixtures/b', (error, stats) => { - assert.strictEqual(error, null); - assert.ok(stats); - done(); - }); - }); - it('should work with options', (done) => { - _1.stat('fixtures/b', { markSymbolicLink: true }, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.isSymbolicLink(), true); - done(); - }); - }); - it('should work with settings', (done) => { - const settings = new _1.Settings({ markSymbolicLink: true }); - _1.stat('fixtures/b', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.isSymbolicLink(), true); - done(); - }); - }); - }); - describe('.statSync', () => { - it('should work without options or settings', () => { - const actual = _1.statSync('fixtures/b'); - assert.ok(actual); - }); - it('should work with options', () => { - const actual = _1.statSync('fixtures/b', { markSymbolicLink: true }); - assert.strictEqual(actual.isSymbolicLink(), true); - }); - it('should work with settings', () => { - const settings = new _1.Settings({ markSymbolicLink: true }); - const actual = _1.statSync('fixtures/b', settings); - assert.strictEqual(actual.isSymbolicLink(), true); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts index a9637c55..85423ce1 100644 --- a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts +++ b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts @@ -1,5 +1,4 @@ -import Settings from '../settings'; -import { ErrnoException, Stats } from '../types'; -export declare type AsyncCallback = (err: ErrnoException, stats: Stats) => void; -export declare function read(path: string, 
settings: Settings, callback: AsyncCallback): void; -//# sourceMappingURL=async.d.ts.map \ No newline at end of file +import type Settings from '../settings'; +import type { ErrnoException, Stats } from '../types'; +export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; +export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts.map b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts.map deleted file mode 100644 index 2a50cf6e..00000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"async.d.ts","sourceRoot":"","sources":["../../src/providers/async.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,aAAa,CAAC;AACnC,OAAO,EAAE,cAAc,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAKjD,oBAAY,aAAa,GAAG,CAAC,GAAG,EAAE,cAAc,EAAE,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;AAExE,wBAAgB,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CA0BpF"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/@nodelib/fs.stat/out/providers/async.js index 6235ea98..983ff0e6 100644 --- a/node_modules/@nodelib/fs.stat/out/providers/async.js +++ b/node_modules/@nodelib/fs.stat/out/providers/async.js @@ -1,32 +1,36 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings, callback) { - settings.fs.lstat(path, (lstatError, lstat) => { - if (lstatError !== null) { - return callFailureCallback(callback, lstatError); - } - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return callSuccessCallback(callback, lstat); - } - settings.fs.stat(path, (statError, stat) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - return callFailureCallback(callback, statError); - } - return callSuccessCallback(callback, lstat); - } - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - callSuccessCallback(callback, stat); - }); - }); -} -exports.read = read; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings, callback) { + settings.fs.lstat(path, (lstatError, lstat) => { + if (lstatError !== null) { + callFailureCallback(callback, lstatError); + return; + } + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + callSuccessCallback(callback, lstat); + return; + } + settings.fs.stat(path, (statError, stat) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + callFailureCallback(callback, statError); + return; + } + callSuccessCallback(callback, lstat); + return; + } + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + callSuccessCallback(callback, stat); + }); + }); +} +exports.read = read; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts b/node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts deleted file mode 100644 index c3f82484..00000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# 
sourceMappingURL=async.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts.map b/node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts.map deleted file mode 100644 index a67f476b..00000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"async.spec.d.ts","sourceRoot":"","sources":["../../src/providers/async.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.spec.js b/node_modules/@nodelib/fs.stat/out/providers/async.spec.js deleted file mode 100644 index ead04b53..00000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.spec.js +++ /dev/null @@ -1,83 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const sinon = require("sinon"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -const settings_1 = require("../settings"); -const provider = require("./async"); -describe('Providers → Async', () => { - describe('.read', () => { - it('should return lstat for non-symlink entry', (done) => { - const lstat = sinon.stub().yields(null, new fs_macchiato_1.Stats()); - const settings = new settings_1.default({ - fs: { lstat } - }); - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 0); - done(); - }); - }); - it('should return lstat for symlink entry when the "followSymbolicLink" option is disabled', (done) => { - const lstat = sinon.stub().yields(null, new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const settings = new settings_1.default({ - followSymbolicLink: false, - fs: { lstat } - }); - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 0); - done(); - }); - }); - it('should return stat for symlink entry', (done) => { - const lstat = sinon.stub().yields(null, new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const stat = sinon.stub().yields(null, new fs_macchiato_1.Stats({ ino: 1 })); - const settings = new settings_1.default({ - fs: { lstat, stat } - }); - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 1); - done(); - }); - }); - it('should return marked stat for symlink entry when the "markSymbolicLink" option is enabled', (done) => { - const lstat = sinon.stub().yields(null, new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const stat = sinon.stub().yields(null, new fs_macchiato_1.Stats({ ino: 1 })); - const settings = new settings_1.default({ - fs: { lstat, stat }, - markSymbolicLink: true - }); - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.isSymbolicLink(), true); - done(); - }); - }); - it('should return lstat for broken symlink entry when the "throwErrorOnBrokenSymbolicLink" option is disabled', (done) => { - const lstat = sinon.stub().yields(null, new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const stat = sinon.stub().yields(new Error()); - const settings = new settings_1.default({ - fs: { lstat, stat }, - throwErrorOnBrokenSymbolicLink: false - }); - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 0); - done(); - }); - }); - it('should throw an error when symlink entry is broken', (done) => { - 
const lstat = sinon.stub().yields(null, new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const stat = sinon.stub().yields(new Error('broken')); - const settings = new settings_1.default({ - fs: { lstat, stat } - }); - provider.read('filepath', settings, (error) => { - assert.strictEqual(error.message, 'broken'); - done(); - }); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts index f4c1d78d..428c3d79 100644 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts @@ -1,4 +1,3 @@ -import Settings from '../settings'; -import { Stats } from '../types'; -export declare function read(path: string, settings: Settings): Stats; -//# sourceMappingURL=sync.d.ts.map \ No newline at end of file +import type Settings from '../settings'; +import type { Stats } from '../types'; +export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts.map b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts.map deleted file mode 100644 index 4f4eb438..00000000 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"sync.d.ts","sourceRoot":"","sources":["../../src/providers/sync.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,aAAa,CAAC;AACnC,OAAO,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAEjC,wBAAgB,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,GAAG,KAAK,CAsB5D"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.js b/node_modules/@nodelib/fs.stat/out/providers/sync.js index d478e1f3..1521c361 100644 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.js +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.js @@ -1,23 +1,23 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings) { - const lstat = settings.fs.lstatSync(path); - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return lstat; - } - try { - const stat = settings.fs.statSync(path); - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - return stat; - } - catch (error) { - if (!settings.throwErrorOnBrokenSymbolicLink) { - return lstat; - } - throw error; - } -} -exports.read = read; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings) { + const lstat = settings.fs.lstatSync(path); + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + return lstat; + } + try { + const stat = settings.fs.statSync(path); + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + return stat; + } + catch (error) { + if (!settings.throwErrorOnBrokenSymbolicLink) { + return lstat; + } + throw error; + } +} +exports.read = read; diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts b/node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts deleted file mode 100644 index 5167ab33..00000000 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=sync.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts.map b/node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts.map deleted file mode 100644 index aae94987..00000000 --- 
a/node_modules/@nodelib/fs.stat/out/providers/sync.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"sync.spec.d.ts","sourceRoot":"","sources":["../../src/providers/sync.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.spec.js b/node_modules/@nodelib/fs.stat/out/providers/sync.spec.js deleted file mode 100644 index 99e34b9d..00000000 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.spec.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const sinon = require("sinon"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -const settings_1 = require("../settings"); -const provider = require("./sync"); -describe('Providers → Sync', () => { - describe('.read', () => { - it('should return lstat for non-symlink entry', () => { - const lstatSync = sinon.stub().returns(new fs_macchiato_1.Stats()); - const settings = new settings_1.default({ - fs: { lstatSync } - }); - const actual = provider.read('filepath', settings); - assert.strictEqual(actual.ino, 0); - }); - it('should return lstat for symlink entry when the "followSymbolicLink" option is disabled', () => { - const lstatSync = sinon.stub().returns(new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const settings = new settings_1.default({ - followSymbolicLink: false, - fs: { lstatSync } - }); - const actual = provider.read('filepath', settings); - assert.strictEqual(actual.ino, 0); - }); - it('should return stat for symlink entry', () => { - const lstatSync = sinon.stub().returns(new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const statSync = sinon.stub().returns(new fs_macchiato_1.Stats({ ino: 1 })); - const settings = new settings_1.default({ - fs: { lstatSync, statSync } - }); - const actual = provider.read('filepath', settings); - assert.strictEqual(actual.ino, 1); - }); - it('should return marked stat for symlink entry when the "markSymbolicLink" option is enabled', () => { - const lstatSync = sinon.stub().returns(new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const statSync = sinon.stub().returns(new fs_macchiato_1.Stats({ ino: 1 })); - const settings = new settings_1.default({ - markSymbolicLink: true, - fs: { lstatSync, statSync } - }); - const actual = provider.read('filepath', settings); - assert.strictEqual(actual.isSymbolicLink(), true); - }); - it('should return lstat for broken symlink entry when the "throwErrorOnBrokenSymbolicLink" option is disabled', () => { - const lstatSync = sinon.stub().returns(new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const statSync = sinon.stub().throws(new Error('error')); - const settings = new settings_1.default({ - fs: { lstatSync, statSync }, - throwErrorOnBrokenSymbolicLink: false - }); - const actual = provider.read('filepath', settings); - assert.strictEqual(actual.ino, 0); - }); - it('should throw an error when symlink entry is broken', () => { - const lstatSync = sinon.stub().returns(new fs_macchiato_1.Stats({ isSymbolicLink: true })); - const statSync = sinon.stub().throws(new Error('broken')); - const settings = new settings_1.default({ - fs: { lstatSync, statSync } - }); - const expectedErrorMessageRe = /broken/; - assert.throws(() => provider.read('filepath', settings), expectedErrorMessageRe); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/@nodelib/fs.stat/out/settings.d.ts index 34c46206..f4b3d444 100644 --- 
a/node_modules/@nodelib/fs.stat/out/settings.d.ts +++ b/node_modules/@nodelib/fs.stat/out/settings.d.ts @@ -1,17 +1,16 @@ -import * as fs from './adapters/fs'; -export declare type Options = { - followSymbolicLink?: boolean; - fs?: Partial; - markSymbolicLink?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -}; -export default class Settings { - private readonly _options; - readonly followSymbolicLink: boolean; - readonly fs: fs.FileSystemAdapter; - readonly markSymbolicLink: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - constructor(_options?: Options); - private _getValue; -} -//# sourceMappingURL=settings.d.ts.map \ No newline at end of file +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLink?: boolean; + fs?: Partial; + markSymbolicLink?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLink: boolean; + readonly fs: fs.FileSystemAdapter; + readonly markSymbolicLink: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.stat/out/settings.d.ts.map b/node_modules/@nodelib/fs.stat/out/settings.d.ts.map deleted file mode 100644 index e98652ce..00000000 --- a/node_modules/@nodelib/fs.stat/out/settings.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"settings.d.ts","sourceRoot":"","sources":["../src/settings.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,eAAe,CAAC;AAEpC,oBAAY,OAAO,GAAG;IACrB,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,EAAE,CAAC,EAAE,OAAO,CAAC,EAAE,CAAC,iBAAiB,CAAC,CAAC;IACnC,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,8BAA8B,CAAC,EAAE,OAAO,CAAC;CACzC,CAAC;AAEF,MAAM,CAAC,OAAO,OAAO,QAAQ;IAMhB,OAAO,CAAC,QAAQ,CAAC,QAAQ;IALrC,SAAgB,kBAAkB,EAAE,OAAO,CAA0D;IACrG,SAAgB,EAAE,EAAE,EAAE,CAAC,iBAAiB,CAAgD;IACxF,SAAgB,gBAAgB,EAAE,OAAO,CAAyD;IAClG,SAAgB,8BAA8B,EAAE,OAAO,CAAsE;gBAEhG,QAAQ,GAAE,OAAY;IAEnD,OAAO,CAAC,SAAS;CAGjB"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/@nodelib/fs.stat/out/settings.js index 18fa816c..111ec09c 100644 --- a/node_modules/@nodelib/fs.stat/out/settings.js +++ b/node_modules/@nodelib/fs.stat/out/settings.js @@ -1,16 +1,16 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? 
option : value; - } -} -exports.default = Settings; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.stat/out/settings.spec.d.ts b/node_modules/@nodelib/fs.stat/out/settings.spec.d.ts deleted file mode 100644 index ff5bc0fd..00000000 --- a/node_modules/@nodelib/fs.stat/out/settings.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=settings.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/settings.spec.d.ts.map b/node_modules/@nodelib/fs.stat/out/settings.spec.d.ts.map deleted file mode 100644 index b69e1aa7..00000000 --- a/node_modules/@nodelib/fs.stat/out/settings.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"settings.spec.d.ts","sourceRoot":"","sources":["../src/settings.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/settings.spec.js b/node_modules/@nodelib/fs.stat/out/settings.spec.js deleted file mode 100644 index 14489ee2..00000000 --- a/node_modules/@nodelib/fs.stat/out/settings.spec.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fs_macchiato_1 = require("../../fs.macchiato"); -const fs = require("./adapters/fs"); -const settings_1 = require("./settings"); -describe('Settings', () => { - it('should return instance with default values', () => { - const settings = new settings_1.default(); - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter()); - assert.ok(settings.throwErrorOnBrokenSymbolicLink); - assert.ok(!settings.markSymbolicLink); - assert.ok(settings.followSymbolicLink); - }); - it('should return instance with custom values', () => { - const lstatSync = () => new fs_macchiato_1.Stats(); - const settings = new settings_1.default({ - followSymbolicLink: false, - fs: fs.createFileSystemAdapter({ lstatSync }), - throwErrorOnBrokenSymbolicLink: false - }); - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter({ lstatSync })); - assert.ok(!settings.throwErrorOnBrokenSymbolicLink); - assert.ok(!settings.followSymbolicLink); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/@nodelib/fs.stat/out/types/index.d.ts index 227f7bf8..74c08ed2 100644 --- a/node_modules/@nodelib/fs.stat/out/types/index.d.ts +++ b/node_modules/@nodelib/fs.stat/out/types/index.d.ts @@ -1,5 +1,4 @@ -/// -import * as fs from 'fs'; -export declare type Stats = fs.Stats; -export declare type ErrnoException = NodeJS.ErrnoException; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +/// +import type * as fs from 'fs'; +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.d.ts.map b/node_modules/@nodelib/fs.stat/out/types/index.d.ts.map deleted file mode 
100644 index 9c0ca100..00000000 --- a/node_modules/@nodelib/fs.stat/out/types/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,oBAAY,KAAK,GAAG,EAAE,CAAC,KAAK,CAAC;AAC7B,oBAAY,cAAc,GAAG,MAAM,CAAC,cAAc,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/@nodelib/fs.stat/out/types/index.js index ce03781e..c8ad2e54 100644 --- a/node_modules/@nodelib/fs.stat/out/types/index.js +++ b/node_modules/@nodelib/fs.stat/out/types/index.js @@ -1,2 +1,2 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.stat/package.json b/node_modules/@nodelib/fs.stat/package.json index 7fef92ee..f2540c28 100644 --- a/node_modules/@nodelib/fs.stat/package.json +++ b/node_modules/@nodelib/fs.stat/package.json @@ -1,6 +1,6 @@ { "name": "@nodelib/fs.stat", - "version": "2.0.4", + "version": "2.0.5", "description": "Get the status of a file with some features", "license": "MIT", "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", @@ -14,6 +14,11 @@ "engines": { "node": ">= 8" }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], "main": "out/index.js", "typings": "out/index.d.ts", "scripts": { @@ -25,5 +30,8 @@ "build": "npm run clean && npm run compile && npm run lint && npm test", "watch": "npm run clean && npm run compile:watch" }, - "gitHead": "cb5f7e893a986164c3b847a4f1faef6c54cadd68" + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" } diff --git a/node_modules/@nodelib/fs.stat/src/adapters/fs.spec.ts b/node_modules/@nodelib/fs.stat/src/adapters/fs.spec.ts deleted file mode 100644 index e274bc03..00000000 --- a/node_modules/@nodelib/fs.stat/src/adapters/fs.spec.ts +++ /dev/null @@ -1,31 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; - -import { Stats } from '../../../fs.macchiato'; - -import * as adapter from './fs'; - -describe('Adapters → FileSystem', () => { - it('should return original FS methods', () => { - const expected: adapter.FileSystemAdapter = adapter.FILE_SYSTEM_ADAPTER; - - const actual = adapter.createFileSystemAdapter(); - - assert.deepStrictEqual(actual, expected); - }); - - it('should return custom FS methods', () => { - const customLstatSyncMethod: typeof fs.lstatSync = () => new Stats(); - - const expected: adapter.FileSystemAdapter = { - ...adapter.FILE_SYSTEM_ADAPTER, - lstatSync: customLstatSyncMethod - }; - - const actual = adapter.createFileSystemAdapter({ - lstatSync: customLstatSyncMethod - }); - - assert.deepStrictEqual(actual, expected); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/src/adapters/fs.ts b/node_modules/@nodelib/fs.stat/src/adapters/fs.ts deleted file mode 100644 index 7f31cbb6..00000000 --- a/node_modules/@nodelib/fs.stat/src/adapters/fs.ts +++ /dev/null @@ -1,26 +0,0 @@ -import * as fs from 'fs'; - -export type FileSystemAdapter = { - lstat: typeof fs.lstat; - stat: typeof fs.stat; - lstatSync: typeof fs.lstatSync; - statSync: typeof fs.statSync; -}; - -export const FILE_SYSTEM_ADAPTER: FileSystemAdapter = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync -}; - -export function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter 
{ - if (fsMethods === undefined) { - return FILE_SYSTEM_ADAPTER; - } - - return { - ...FILE_SYSTEM_ADAPTER, - ...fsMethods - }; -} diff --git a/node_modules/@nodelib/fs.stat/src/index.spec.ts b/node_modules/@nodelib/fs.stat/src/index.spec.ts deleted file mode 100644 index 7affed90..00000000 --- a/node_modules/@nodelib/fs.stat/src/index.spec.ts +++ /dev/null @@ -1,70 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; - -import * as rimraf from 'rimraf'; - -import { stat, statSync, Settings } from '.'; - -describe('Package', () => { - before(() => { - rimraf.sync('fixtures'); - - fs.mkdirSync('fixtures'); - fs.mkdirSync('fixtures/a'); - fs.symlinkSync('a', 'fixtures/b', 'junction'); - }); - - after(() => { - rimraf.sync('fixtures'); - }); - - describe('.stat', () => { - it('should work without options or settings', (done) => { - stat('fixtures/b', (error, stats) => { - assert.strictEqual(error, null); - assert.ok(stats); - done(); - }); - }); - - it('should work with options', (done) => { - stat('fixtures/b', { markSymbolicLink: true }, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.isSymbolicLink(), true); - done(); - }); - }); - - it('should work with settings', (done) => { - const settings = new Settings({ markSymbolicLink: true }); - - stat('fixtures/b', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.isSymbolicLink(), true); - done(); - }); - }); - }); - - describe('.statSync', () => { - it('should work without options or settings', () => { - const actual = statSync('fixtures/b'); - - assert.ok(actual); - }); - - it('should work with options', () => { - const actual = statSync('fixtures/b', { markSymbolicLink: true }); - - assert.strictEqual(actual.isSymbolicLink(), true); - }); - - it('should work with settings', () => { - const settings = new Settings({ markSymbolicLink: true }); - - const actual = statSync('fixtures/b', settings); - - assert.strictEqual(actual.isSymbolicLink(), true); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/src/index.ts b/node_modules/@nodelib/fs.stat/src/index.ts deleted file mode 100644 index 992f7216..00000000 --- a/node_modules/@nodelib/fs.stat/src/index.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { FileSystemAdapter } from './adapters/fs'; -import * as async from './providers/async'; -import * as sync from './providers/sync'; -import Settings, { Options } from './settings'; -import { Stats } from './types'; - -type AsyncCallback = async.AsyncCallback; - -function stat(path: string, callback: AsyncCallback): void; -function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -function stat(path: string, optionsOrSettingsOrCallback: Options | Settings | AsyncCallback, callback?: AsyncCallback): void { - if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); - } - - async.read(path, getSettings(optionsOrSettingsOrCallback), callback as AsyncCallback); -} - -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -declare namespace stat { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} - -function statSync(path: string, optionsOrSettings?: Options | Settings): Stats { - const settings = getSettings(optionsOrSettings); - - return sync.read(path, settings); -} - -function getSettings(settingsOrOptions: Settings | Options = {}): Settings { - if 
(settingsOrOptions instanceof Settings) { - return settingsOrOptions; - } - - return new Settings(settingsOrOptions); -} - -export { - Settings, - stat, - statSync, - - // https://github.com/typescript-eslint/typescript-eslint/issues/131 - // eslint-disable-next-line no-undef - AsyncCallback, - FileSystemAdapter, - Options, - Stats -}; diff --git a/node_modules/@nodelib/fs.stat/src/providers/async.spec.ts b/node_modules/@nodelib/fs.stat/src/providers/async.spec.ts deleted file mode 100644 index 19ecf310..00000000 --- a/node_modules/@nodelib/fs.stat/src/providers/async.spec.ts +++ /dev/null @@ -1,102 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; - -import * as sinon from 'sinon'; - -import { Stats } from '../../../fs.macchiato'; -import Settings from '../settings'; -import * as provider from './async'; - -describe('Providers → Async', () => { - describe('.read', () => { - it('should return lstat for non-symlink entry', (done) => { - const lstat = sinon.stub().yields(null, new Stats()) as unknown as typeof fs.lstat; - - const settings = new Settings({ - fs: { lstat } - }); - - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 0); - done(); - }); - }); - - it('should return lstat for symlink entry when the "followSymbolicLink" option is disabled', (done) => { - const lstat = sinon.stub().yields(null, new Stats({ isSymbolicLink: true })) as unknown as typeof fs.lstat; - - const settings = new Settings({ - followSymbolicLink: false, - fs: { lstat } - }); - - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 0); - done(); - }); - }); - - it('should return stat for symlink entry', (done) => { - const lstat = sinon.stub().yields(null, new Stats({ isSymbolicLink: true })) as unknown as typeof fs.lstat; - const stat = sinon.stub().yields(null, new Stats({ ino: 1 })) as unknown as typeof fs.stat; - - const settings = new Settings({ - fs: { lstat, stat } - }); - - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 1); - done(); - }); - }); - - it('should return marked stat for symlink entry when the "markSymbolicLink" option is enabled', (done) => { - const lstat = sinon.stub().yields(null, new Stats({ isSymbolicLink: true })) as unknown as typeof fs.lstat; - const stat = sinon.stub().yields(null, new Stats({ ino: 1 })) as unknown as typeof fs.stat; - - const settings = new Settings({ - fs: { lstat, stat }, - markSymbolicLink: true - }); - - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.isSymbolicLink(), true); - done(); - }); - }); - - it('should return lstat for broken symlink entry when the "throwErrorOnBrokenSymbolicLink" option is disabled', (done) => { - const lstat = sinon.stub().yields(null, new Stats({ isSymbolicLink: true })) as unknown as typeof fs.lstat; - const stat = sinon.stub().yields(new Error()) as unknown as typeof fs.stat; - - const settings = new Settings({ - fs: { lstat, stat }, - throwErrorOnBrokenSymbolicLink: false - }); - - provider.read('filepath', settings, (error, stats) => { - assert.strictEqual(error, null); - assert.strictEqual(stats.ino, 0); - done(); - }); - }); - - it('should throw an error when symlink entry is broken', (done) => { - const lstat = sinon.stub().yields(null, new Stats({ isSymbolicLink: true })) as unknown as typeof fs.lstat; - const stat = 
sinon.stub().yields(new Error('broken')) as unknown as typeof fs.stat; - - const settings = new Settings({ - fs: { lstat, stat } - }); - - provider.read('filepath', settings, (error) => { - assert.strictEqual(error.message, 'broken'); - done(); - }); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/src/providers/async.ts b/node_modules/@nodelib/fs.stat/src/providers/async.ts deleted file mode 100644 index 1ec6c28a..00000000 --- a/node_modules/@nodelib/fs.stat/src/providers/async.ts +++ /dev/null @@ -1,43 +0,0 @@ -import Settings from '../settings'; -import { ErrnoException, Stats } from '../types'; - -type FailureCallback = (err: ErrnoException) => void; -type SuccessCallback = (err: null, stats: Stats) => void; - -export type AsyncCallback = (err: ErrnoException, stats: Stats) => void; - -export function read(path: string, settings: Settings, callback: AsyncCallback): void { - settings.fs.lstat(path, (lstatError, lstat) => { - if (lstatError !== null) { - return callFailureCallback(callback, lstatError); - } - - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return callSuccessCallback(callback, lstat); - } - - settings.fs.stat(path, (statError, stat) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - return callFailureCallback(callback, statError); - } - - return callSuccessCallback(callback, lstat); - } - - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - - callSuccessCallback(callback, stat); - }); - }); -} - -function callFailureCallback(callback: AsyncCallback, error: ErrnoException): void { - (callback as FailureCallback)(error); -} - -function callSuccessCallback(callback: AsyncCallback, result: Stats): void { - (callback as unknown as SuccessCallback)(null, result); -} diff --git a/node_modules/@nodelib/fs.stat/src/providers/sync.spec.ts b/node_modules/@nodelib/fs.stat/src/providers/sync.spec.ts deleted file mode 100644 index f027cda4..00000000 --- a/node_modules/@nodelib/fs.stat/src/providers/sync.spec.ts +++ /dev/null @@ -1,90 +0,0 @@ -import * as assert from 'assert'; - -import * as sinon from 'sinon'; - -import { Stats } from '../../../fs.macchiato'; -import Settings from '../settings'; -import * as provider from './sync'; - -describe('Providers → Sync', () => { - describe('.read', () => { - it('should return lstat for non-symlink entry', () => { - const lstatSync = sinon.stub().returns(new Stats()); - - const settings = new Settings({ - fs: { lstatSync } - }); - - const actual = provider.read('filepath', settings); - - assert.strictEqual(actual.ino, 0); - }); - - it('should return lstat for symlink entry when the "followSymbolicLink" option is disabled', () => { - const lstatSync = sinon.stub().returns(new Stats({ isSymbolicLink: true })); - - const settings = new Settings({ - followSymbolicLink: false, - fs: { lstatSync } - }); - - const actual = provider.read('filepath', settings); - - assert.strictEqual(actual.ino, 0); - }); - - it('should return stat for symlink entry', () => { - const lstatSync = sinon.stub().returns(new Stats({ isSymbolicLink: true })); - const statSync = sinon.stub().returns(new Stats({ ino: 1 })); - - const settings = new Settings({ - fs: { lstatSync, statSync } - }); - - const actual = provider.read('filepath', settings); - - assert.strictEqual(actual.ino, 1); - }); - - it('should return marked stat for symlink entry when the "markSymbolicLink" option is enabled', () => { - const lstatSync = sinon.stub().returns(new Stats({ isSymbolicLink: true })); - const 
statSync = sinon.stub().returns(new Stats({ ino: 1 })); - - const settings = new Settings({ - markSymbolicLink: true, - fs: { lstatSync, statSync } - }); - - const actual = provider.read('filepath', settings); - - assert.strictEqual(actual.isSymbolicLink(), true); - }); - - it('should return lstat for broken symlink entry when the "throwErrorOnBrokenSymbolicLink" option is disabled', () => { - const lstatSync = sinon.stub().returns(new Stats({ isSymbolicLink: true })); - const statSync = sinon.stub().throws(new Error('error')); - - const settings = new Settings({ - fs: { lstatSync, statSync }, - throwErrorOnBrokenSymbolicLink: false - }); - - const actual = provider.read('filepath', settings); - - assert.strictEqual(actual.ino, 0); - }); - - it('should throw an error when symlink entry is broken', () => { - const lstatSync = sinon.stub().returns(new Stats({ isSymbolicLink: true })); - const statSync = sinon.stub().throws(new Error('broken')); - - const settings = new Settings({ - fs: { lstatSync, statSync } - }); - - const expectedErrorMessageRe = /broken/; - - assert.throws(() => provider.read('filepath', settings), expectedErrorMessageRe); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/src/providers/sync.ts b/node_modules/@nodelib/fs.stat/src/providers/sync.ts deleted file mode 100644 index a1a36ceb..00000000 --- a/node_modules/@nodelib/fs.stat/src/providers/sync.ts +++ /dev/null @@ -1,26 +0,0 @@ -import Settings from '../settings'; -import { Stats } from '../types'; - -export function read(path: string, settings: Settings): Stats { - const lstat = settings.fs.lstatSync(path); - - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return lstat; - } - - try { - const stat = settings.fs.statSync(path); - - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - - return stat; - } catch (error) { - if (!settings.throwErrorOnBrokenSymbolicLink) { - return lstat; - } - - throw error; - } -} diff --git a/node_modules/@nodelib/fs.stat/src/settings.spec.ts b/node_modules/@nodelib/fs.stat/src/settings.spec.ts deleted file mode 100644 index d8d33b8d..00000000 --- a/node_modules/@nodelib/fs.stat/src/settings.spec.ts +++ /dev/null @@ -1,31 +0,0 @@ -import * as assert from 'assert'; - -import { Stats } from '../../fs.macchiato'; - -import * as fs from './adapters/fs'; -import Settings from './settings'; - -describe('Settings', () => { - it('should return instance with default values', () => { - const settings = new Settings(); - - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter()); - assert.ok(settings.throwErrorOnBrokenSymbolicLink); - assert.ok(!settings.markSymbolicLink); - assert.ok(settings.followSymbolicLink); - }); - - it('should return instance with custom values', () => { - const lstatSync = (): Stats => new Stats(); - - const settings = new Settings({ - followSymbolicLink: false, - fs: fs.createFileSystemAdapter({ lstatSync }), - throwErrorOnBrokenSymbolicLink: false - }); - - assert.deepStrictEqual(settings.fs, fs.createFileSystemAdapter({ lstatSync })); - assert.ok(!settings.throwErrorOnBrokenSymbolicLink); - assert.ok(!settings.followSymbolicLink); - }); -}); diff --git a/node_modules/@nodelib/fs.stat/src/settings.ts b/node_modules/@nodelib/fs.stat/src/settings.ts deleted file mode 100644 index e9d8a70b..00000000 --- a/node_modules/@nodelib/fs.stat/src/settings.ts +++ /dev/null @@ -1,21 +0,0 @@ -import * as fs from './adapters/fs'; - -export type Options = { - followSymbolicLink?: boolean; - fs?: Partial; - markSymbolicLink?: 
boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -}; - -export default class Settings { - public readonly followSymbolicLink: boolean = this._getValue(this._options.followSymbolicLink, true); - public readonly fs: fs.FileSystemAdapter = fs.createFileSystemAdapter(this._options.fs); - public readonly markSymbolicLink: boolean = this._getValue(this._options.markSymbolicLink, false); - public readonly throwErrorOnBrokenSymbolicLink: boolean = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - - constructor(private readonly _options: Options = {}) { } - - private _getValue(option: T | undefined, value: T): T { - return option ?? value; - } -} diff --git a/node_modules/@nodelib/fs.stat/src/types/index.ts b/node_modules/@nodelib/fs.stat/src/types/index.ts deleted file mode 100644 index f12709a5..00000000 --- a/node_modules/@nodelib/fs.stat/src/types/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -import * as fs from 'fs'; - -export type Stats = fs.Stats; -export type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/@nodelib/fs.stat/tsconfig.json b/node_modules/@nodelib/fs.stat/tsconfig.json deleted file mode 100644 index 7a98c7bf..00000000 --- a/node_modules/@nodelib/fs.stat/tsconfig.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "extends": "../../../tsconfig.json", - "compilerOptions": { - "rootDir": "src", - "outDir": "out" - }, - "references": [ - { - "path": "../fs.macchiato" - } - ] -} diff --git a/node_modules/@nodelib/fs.stat/tsconfig.tsbuildinfo b/node_modules/@nodelib/fs.stat/tsconfig.tsbuildinfo deleted file mode 100644 index 04bbd19c..00000000 --- a/node_modules/@nodelib/fs.stat/tsconfig.tsbuildinfo +++ /dev/null @@ -1,1570 +0,0 @@ -{ - "program": { - "fileInfos": { - "../../../node_modules/typescript/lib/lib.es5.d.ts": { - "version": "70ae6416528e68c2ee7b62892200d2ca631759943d4429f8b779b947ff1e124d", - "signature": "70ae6416528e68c2ee7b62892200d2ca631759943d4429f8b779b947ff1e124d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.d.ts": { - "version": "dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", - "signature": "dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2016.d.ts": { - "version": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", - "signature": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2017.d.ts": { - "version": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", - "signature": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2018.d.ts": { - "version": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", - "signature": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.dom.d.ts": { - "version": "9affb0a2ddc57df5b8174c0af96c288d697a262e5bc9ca1f544c999dc64a91e6", - "signature": "9affb0a2ddc57df5b8174c0af96c288d697a262e5bc9ca1f544c999dc64a91e6", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.dom.iterable.d.ts": { - "version": "fb0c09b697dc42afa84d1587e3c994a2f554d2a45635e4f0618768d16a86b69a", - "signature": "fb0c09b697dc42afa84d1587e3c994a2f554d2a45635e4f0618768d16a86b69a", - "affectsGlobalScope": true - }, - 
"../../../node_modules/typescript/lib/lib.webworker.importscripts.d.ts": { - "version": "7fac8cb5fc820bc2a59ae11ef1c5b38d3832c6d0dfaec5acdb5569137d09a481", - "signature": "7fac8cb5fc820bc2a59ae11ef1c5b38d3832c6d0dfaec5acdb5569137d09a481", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.scripthost.d.ts": { - "version": "097a57355ded99c68e6df1b738990448e0bf170e606707df5a7c0481ff2427cd", - "signature": "097a57355ded99c68e6df1b738990448e0bf170e606707df5a7c0481ff2427cd", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.core.d.ts": { - "version": "63e0cc12d0f77394094bd19e84464f9840af0071e5b9358ced30511efef1d8d2", - "signature": "63e0cc12d0f77394094bd19e84464f9840af0071e5b9358ced30511efef1d8d2", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.collection.d.ts": { - "version": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", - "signature": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.generator.d.ts": { - "version": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", - "signature": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.iterable.d.ts": { - "version": "42f5e41e5893da663dbf0394268f54f1da4b43dc0ddd2ea4bf471fe5361d6faf", - "signature": "42f5e41e5893da663dbf0394268f54f1da4b43dc0ddd2ea4bf471fe5361d6faf", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.promise.d.ts": { - "version": "0b7a905675e6cb4211c128f0a3aa47d414b275180a299a9aad5d3ec298abbfc4", - "signature": "0b7a905675e6cb4211c128f0a3aa47d414b275180a299a9aad5d3ec298abbfc4", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.proxy.d.ts": { - "version": "dfff68b3c34338f6b307a25d4566de15eed7973b0dc5d69f9fde2bcac1c25315", - "signature": "dfff68b3c34338f6b307a25d4566de15eed7973b0dc5d69f9fde2bcac1c25315", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.reflect.d.ts": { - "version": "cb609802a8698aa28b9c56331d4b53f590ca3c1c3a255350304ae3d06017779d", - "signature": "cb609802a8698aa28b9c56331d4b53f590ca3c1c3a255350304ae3d06017779d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.symbol.d.ts": { - "version": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", - "signature": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts": { - "version": "4670208dd7da9d6c774ab1b75c1527a810388c7989c4905de6aaea8561cb9dce", - "signature": "4670208dd7da9d6c774ab1b75c1527a810388c7989c4905de6aaea8561cb9dce", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2016.array.include.d.ts": { - "version": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", - "signature": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.object.d.ts": { - "version": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", - "signature": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts": { - 
"version": "d0db416bccdb33975548baf09a42ee8c47eace1aac7907351a000f1e568e7232", - "signature": "d0db416bccdb33975548baf09a42ee8c47eace1aac7907351a000f1e568e7232", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.string.d.ts": { - "version": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", - "signature": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.intl.d.ts": { - "version": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", - "signature": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.typedarrays.d.ts": { - "version": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", - "signature": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts": { - "version": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", - "signature": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.asynciterable.d.ts": { - "version": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", - "signature": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.intl.d.ts": { - "version": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", - "signature": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.promise.d.ts": { - "version": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", - "signature": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.regexp.d.ts": { - "version": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", - "signature": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2020.bigint.d.ts": { - "version": "4f435f794b7853c55e2ae7cff6206025802aa79232d2867544178f2ca8ff5eaa", - "signature": "4f435f794b7853c55e2ae7cff6206025802aa79232d2867544178f2ca8ff5eaa", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.esnext.intl.d.ts": { - "version": "89bf2b7a601b73ea4311eda9c41f86a58994fec1bee3b87c4a14d68d9adcdcbd", - "signature": "89bf2b7a601b73ea4311eda9c41f86a58994fec1bee3b87c4a14d68d9adcdcbd", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.full.d.ts": { - "version": "d2f31f19e1ba6ed59be9259d660a239d9a3fcbbc8e038c6b2009bde34b175fed", - "signature": "d2f31f19e1ba6ed59be9259d660a239d9a3fcbbc8e038c6b2009bde34b175fed", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/globals.d.ts": { - "version": "74d61a149bea97a20b324410e4520796ffc36dcf35b54f03cfd0cfe922bb61cc", - "signature": "74d61a149bea97a20b324410e4520796ffc36dcf35b54f03cfd0cfe922bb61cc", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/async_hooks.d.ts": { - "version": "950e73fe3bcda768b5f593cec3f7137bb7cab709a82be89dd08c2a20568a28e2", - "signature": 
"950e73fe3bcda768b5f593cec3f7137bb7cab709a82be89dd08c2a20568a28e2", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/buffer.d.ts": { - "version": "61215c1a376bbe8f51cab4cc4ddbf3746387015113c37a84d981d4738c21b878", - "signature": "61215c1a376bbe8f51cab4cc4ddbf3746387015113c37a84d981d4738c21b878", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/child_process.d.ts": { - "version": "5eca801fb67009c5728b88793670f0137b5e31a8f7d1576d5110a1276feaba8c", - "signature": "5eca801fb67009c5728b88793670f0137b5e31a8f7d1576d5110a1276feaba8c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/cluster.d.ts": { - "version": "ce629710e5e58724902b753212e97861fd73e2aa09f5d88cb6d55dc763cf8c8a", - "signature": "ce629710e5e58724902b753212e97861fd73e2aa09f5d88cb6d55dc763cf8c8a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/console.d.ts": { - "version": "525c8fc510d9632d2a0a9de2d41c3ac1cdd79ff44d3b45c6d81cacabb683528d", - "signature": "525c8fc510d9632d2a0a9de2d41c3ac1cdd79ff44d3b45c6d81cacabb683528d", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/constants.d.ts": { - "version": "0279383034fae92db8097d0a41350293553599cc9c3c917b60e2542d0dfcbd44", - "signature": "0279383034fae92db8097d0a41350293553599cc9c3c917b60e2542d0dfcbd44", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/crypto.d.ts": { - "version": "9b9f8b151698fb1798f04b8375e240c764f094e730192e6a5353abdb1c709d6f", - "signature": "9b9f8b151698fb1798f04b8375e240c764f094e730192e6a5353abdb1c709d6f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/dgram.d.ts": { - "version": "37c4598a5f2025c97492e18bed8909ccd10bf26bb5f54d5f6009f9153291af91", - "signature": "37c4598a5f2025c97492e18bed8909ccd10bf26bb5f54d5f6009f9153291af91", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/dns.d.ts": { - "version": "ef226a42de7022eacdfa0f15aabf73b46c47af93044c8ebfab8aa8e3cf6c330c", - "signature": "ef226a42de7022eacdfa0f15aabf73b46c47af93044c8ebfab8aa8e3cf6c330c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/domain.d.ts": { - "version": "d5b7c8819ce1bd31a45f7675309e145ec28e3aa1b60a8e0637fd0e8916255baa", - "signature": "d5b7c8819ce1bd31a45f7675309e145ec28e3aa1b60a8e0637fd0e8916255baa", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/events.d.ts": { - "version": "76048f3c7325a6c1fa6306d40eb0c8570fa0209d09472d46f9b1221f66edae6f", - "signature": "76048f3c7325a6c1fa6306d40eb0c8570fa0209d09472d46f9b1221f66edae6f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/fs.d.ts": { - "version": "03be37150cc8fe48fd243169653f15149e0ed4a34eea0cae027b708d39eb01f8", - "signature": "03be37150cc8fe48fd243169653f15149e0ed4a34eea0cae027b708d39eb01f8", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/http.d.ts": { - "version": "f50ba0d2d8f891fa11326db36e6c25fe14bce747cf2bd9b554de3bb2a814f49c", - "signature": "f50ba0d2d8f891fa11326db36e6c25fe14bce747cf2bd9b554de3bb2a814f49c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/http2.d.ts": { - "version": "48b53111cc4ce136803fbf857cd8de2d5df33895b1af714a87caf87562182e46", - "signature": "48b53111cc4ce136803fbf857cd8de2d5df33895b1af714a87caf87562182e46", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/https.d.ts": { - "version": "dacbe08610729f6343ea9880ea8e737c6d7a6efa4a318d8f6acaf85db4aceed6", - "signature": 
"dacbe08610729f6343ea9880ea8e737c6d7a6efa4a318d8f6acaf85db4aceed6", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/inspector.d.ts": { - "version": "4218ced3933a31eed1278d350dd63c5900df0f0904f57d61c054d7a4b83dbe4c", - "signature": "4218ced3933a31eed1278d350dd63c5900df0f0904f57d61c054d7a4b83dbe4c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/module.d.ts": { - "version": "03394bf8deb8781b490ae9266a843fbdf00647947d79e25fcbf1d89a9e9c8a66", - "signature": "03394bf8deb8781b490ae9266a843fbdf00647947d79e25fcbf1d89a9e9c8a66", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/net.d.ts": { - "version": "358398fe4034395d85c87c319cca7a04001434b13dc68d067481ecb374385bfc", - "signature": "358398fe4034395d85c87c319cca7a04001434b13dc68d067481ecb374385bfc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/os.d.ts": { - "version": "d9bc6f1917c24d862a68d2633e4a32fd586bfe3e412e5d11fd07d8266b94ced5", - "signature": "d9bc6f1917c24d862a68d2633e4a32fd586bfe3e412e5d11fd07d8266b94ced5", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/path.d.ts": { - "version": "5fb30076f0e0e5744db8993648bfb67aadd895f439edad5cce039127a87a8a36", - "signature": "5fb30076f0e0e5744db8993648bfb67aadd895f439edad5cce039127a87a8a36", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/perf_hooks.d.ts": { - "version": "93a8a589862b5ac8fd8bb46426f7b081ba825a5171337dd45de9bf141624d55e", - "signature": "93a8a589862b5ac8fd8bb46426f7b081ba825a5171337dd45de9bf141624d55e", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/process.d.ts": { - "version": "0e0d58f5e90c0a270dac052b9c5ad8ccdfc8271118c2105b361063218d528d6e", - "signature": "0e0d58f5e90c0a270dac052b9c5ad8ccdfc8271118c2105b361063218d528d6e", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/punycode.d.ts": { - "version": "3f6a1fd73c9dc3bd7f4b79bc075297ca6527904df69b0f2c2c94e4c4c7d9a32c", - "signature": "3f6a1fd73c9dc3bd7f4b79bc075297ca6527904df69b0f2c2c94e4c4c7d9a32c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/querystring.d.ts": { - "version": "758948c06a0d02623c7d4ed357ffa79bdc170de6e004046678774a1bfa9a29bb", - "signature": "758948c06a0d02623c7d4ed357ffa79bdc170de6e004046678774a1bfa9a29bb", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/readline.d.ts": { - "version": "2ca26a43dec700c4b0bdc04b123094f4becffda70e3960f3e10b025f7a15ba8f", - "signature": "2ca26a43dec700c4b0bdc04b123094f4becffda70e3960f3e10b025f7a15ba8f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/repl.d.ts": { - "version": "27c3f3f672a6ce267f7cc34643231032016fa4b6d195c0725db570de0a7a9f91", - "signature": "27c3f3f672a6ce267f7cc34643231032016fa4b6d195c0725db570de0a7a9f91", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/stream.d.ts": { - "version": "9c581919a8c483f5080487ae8ec1dd398d94027aedf8e77436085e7fab23951a", - "signature": "9c581919a8c483f5080487ae8ec1dd398d94027aedf8e77436085e7fab23951a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/string_decoder.d.ts": { - "version": "7e62aac2cc9c0710d772047ad89e8d7117f52592c791eb995ce1f865fedab432", - "signature": "7e62aac2cc9c0710d772047ad89e8d7117f52592c791eb995ce1f865fedab432", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/timers.d.ts": { - "version": "b40652bf8ce4a18133b31349086523b219724dca8df3448c1a0742528e7ad5b9", - "signature": 
"b40652bf8ce4a18133b31349086523b219724dca8df3448c1a0742528e7ad5b9", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/tls.d.ts": { - "version": "48064f81a8354d04808e7b5bddf570aaf19f894cf1d8a2aa1f56c81decd33508", - "signature": "48064f81a8354d04808e7b5bddf570aaf19f894cf1d8a2aa1f56c81decd33508", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/trace_events.d.ts": { - "version": "a77fdb357c78b70142b2fdbbfb72958d69e8f765fd2a3c69946c1018e89d4638", - "signature": "a77fdb357c78b70142b2fdbbfb72958d69e8f765fd2a3c69946c1018e89d4638", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/tty.d.ts": { - "version": "3c2ac350c3baa61fd2b1925844109e098f4376d0768a4643abc82754fd752748", - "signature": "3c2ac350c3baa61fd2b1925844109e098f4376d0768a4643abc82754fd752748", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/url.d.ts": { - "version": "834545a7726e414890371aec1a89b7915963e08e790e093259e8bed429ef15c6", - "signature": "834545a7726e414890371aec1a89b7915963e08e790e093259e8bed429ef15c6", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/util.d.ts": { - "version": "b248fb69886bce18cf6650491f43f0326ed6d59c8fdf7fd63dbd35bf4ef3e2bc", - "signature": "b248fb69886bce18cf6650491f43f0326ed6d59c8fdf7fd63dbd35bf4ef3e2bc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/v8.d.ts": { - "version": "4407bd5f1d6f748590ba125195eb1d7a003c2de2f3b057456d3ac76a742d2561", - "signature": "4407bd5f1d6f748590ba125195eb1d7a003c2de2f3b057456d3ac76a742d2561", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/vm.d.ts": { - "version": "2b57b7d7191c6e2efc2ed4f87cf1e25c383278ac5d019670406508df42dc34f3", - "signature": "2b57b7d7191c6e2efc2ed4f87cf1e25c383278ac5d019670406508df42dc34f3", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/worker_threads.d.ts": { - "version": "46f0413ecc0d83b047d46dbe03a37c7c760f59f0bb9a8633150e2d9335870675", - "signature": "46f0413ecc0d83b047d46dbe03a37c7c760f59f0bb9a8633150e2d9335870675", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/zlib.d.ts": { - "version": "2ea98f43cfae8dfbefc45d8bd1ec4907bbad33d18203ea8ef8b50d36b97afa35", - "signature": "2ea98f43cfae8dfbefc45d8bd1ec4907bbad33d18203ea8ef8b50d36b97afa35", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/ts3.3/base.d.ts": { - "version": "067b1964df87a4fc98ebffbd2bada6d7ed14a5b032f9071ea39478d82e701a99", - "signature": "067b1964df87a4fc98ebffbd2bada6d7ed14a5b032f9071ea39478d82e701a99", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/globals.global.d.ts": { - "version": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", - "signature": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/wasi.d.ts": { - "version": "14a6a3cee450438254c004a6b4f1191ec9977186bdeda07764f2a8d90ef71117", - "signature": "14a6a3cee450438254c004a6b4f1191ec9977186bdeda07764f2a8d90ef71117", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/ts3.6/base.d.ts": { - "version": "d170ea32762c00c660740f2cc0ca9526290ab9d9fb9c72282c1fa53cd1a7728e", - "signature": "d170ea32762c00c660740f2cc0ca9526290ab9d9fb9c72282c1fa53cd1a7728e", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/assert.d.ts": { - "version": "54b2276780dc8d538a71b954b87ea081a1e9f90e7f1195f2daf2bddde0bf52df", - "signature": 
"54b2276780dc8d538a71b954b87ea081a1e9f90e7f1195f2daf2bddde0bf52df", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/base.d.ts": { - "version": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", - "signature": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/index.d.ts": { - "version": "6fa68653382bd571bc63831e9f9c1307cc52f7310c1470463fe429d84147667d", - "signature": "6fa68653382bd571bc63831e9f9c1307cc52f7310c1470463fe429d84147667d", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/minimatch/index.d.ts": { - "version": "1d1e6bd176eee5970968423d7e215bfd66828b6db8d54d17afec05a831322633", - "signature": "1d1e6bd176eee5970968423d7e215bfd66828b6db8d54d17afec05a831322633", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/glob/index.d.ts": { - "version": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", - "signature": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/rimraf/index.d.ts": { - "version": "6462324ef579c47415610a63f1aa8b72f5b5114f8fe8307967f9add2bca634f5", - "signature": "6462324ef579c47415610a63f1aa8b72f5b5114f8fe8307967f9add2bca634f5", - "affectsGlobalScope": false - }, - "./src/adapters/fs.ts": { - "version": "fbff8c8901179945f364b5feeba4357dfae786b6e005ff4f0edd6784067bb2e6", - "signature": "ceebf93146ac7b3f85276a2501de57c5cf5bb19742944c958bd831f995b41409", - "affectsGlobalScope": false - }, - "./src/settings.ts": { - "version": "26286034a5898f7d69d87a8b3301ea15e18b23f9d8ee585bae78e8662cd7913e", - "signature": "30c47bd1f03a220a10e8c11708a2c73c04135999ca1a35271605f9683d36b432", - "affectsGlobalScope": false - }, - "./src/types/index.ts": { - "version": "78d675089b599dbe851a60f0a835c14787df11e086c11d6918e5f67db765f37f", - "signature": "8b9fa6dfb2bec7abe9937fe049505d896550b2ad600cb7114b6fe2813b5cf180", - "affectsGlobalScope": false - }, - "./src/providers/async.ts": { - "version": "96c019f1577a7d6435428c45190716b4f07be9f595166e4c7f503a1f6a9f8fe2", - "signature": "c692034610ac35559227657172f6f76581ee7b16c319c7d5973e19b650f11b9f", - "affectsGlobalScope": false - }, - "./src/providers/sync.ts": { - "version": "1fe18b4ba59ebbd46366867349dbc8dc9cc247ee921ce7ca476b7599321fd44e", - "signature": "59a8d722db79a6f25de70007977f2ab2cbc2c2ff6c1ad35a7d3479d6277233e2", - "affectsGlobalScope": false - }, - "./src/index.ts": { - "version": "2f0e89d618d76eb0b7bba6630b1c2698fdf173fbfd51f0934b6a4c0a1596ee4f", - "signature": "079488cc4bf1eef64297994ef8719c078a86380610beea1d1a920c9436997967", - "affectsGlobalScope": false - }, - "./src/index.spec.ts": { - "version": "ebbe92f1114d1b516b2be588b4a754ee03f8ea1fc5c697572add78a057123548", - "signature": "a900cdf2c35bba00b0363cc950bbf88b887976e70a9eae929dad35ef964109d9", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/types.d.ts": { - "version": "47b605d1e61f92f418c7879051e5458f8ec00aeacac419a37754066fec42b9ba", - "signature": "47b605d1e61f92f418c7879051e5458f8ec00aeacac419a37754066fec42b9ba", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/dirent.d.ts": { - "version": "98387ef539ccca1023a5934f6ea2dd87ee8a6c87db31ec7986b9da016c66fc16", - "signature": "98387ef539ccca1023a5934f6ea2dd87ee8a6c87db31ec7986b9da016c66fc16", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/stats.d.ts": { - "version": "7c70ba0c69002f78ddac880f0096de5b0e78248cf680c1fe1a89439dbf069c5d", - 
"signature": "7c70ba0c69002f78ddac880f0096de5b0e78248cf680c1fe1a89439dbf069c5d", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/index.d.ts": { - "version": "e00937f585b9c2f95d9d4e00b4e76427eb9516c70a4470d805451ba2ea00044e", - "signature": "e00937f585b9c2f95d9d4e00b4e76427eb9516c70a4470d805451ba2ea00044e", - "affectsGlobalScope": false - }, - "./src/settings.spec.ts": { - "version": "202fc385a602cd5ca176ade46ee0ae0020e5dcfbf12f12e9fc1e57f86ebc9e2b", - "signature": "714d2bb322e0442caf181768f049abd17a96d328d87169c2e6c13a86839c4463", - "affectsGlobalScope": false - }, - "./src/adapters/fs.spec.ts": { - "version": "c2643809431e7e7efde3d5788a889e0368005436bdb7f35925ef4e8ab758d99e", - "signature": "bd7314ded2b0851e1bb0834dc068cb4d18cdbecc9e965e8a0f4952ac3ee4610c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/sinon/ts3.1/index.d.ts": { - "version": "168435ab3390620aebf1aa0001b380983582d0849755eeb17f2c501d1fc57587", - "signature": "168435ab3390620aebf1aa0001b380983582d0849755eeb17f2c501d1fc57587", - "affectsGlobalScope": false - }, - "./src/providers/async.spec.ts": { - "version": "b39f1ed99d2e69f3d7d4f9f99cd931a080a02bb0071004b8082414f5e44bebea", - "signature": "6ff501c2b9280fbf7322044c48dff6eea6849df3b6ab6844facd9d789988a2c9", - "affectsGlobalScope": false - }, - "./src/providers/sync.spec.ts": { - "version": "ced98c0745c28af2360f4ecc3ea973384b369cc78d9e84f27603e162eef8b40d", - "signature": "be22d8b5a836edfac7c9c5ef03e98058ec89f0b98edef8e54ea410187b0bda28", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/eslint-visitor-keys/index.d.ts": { - "version": "725d9be2fd48440256f4deb00649adffdbc5ecd282b09e89d4e200663792c34c", - "signature": "725d9be2fd48440256f4deb00649adffdbc5ecd282b09e89d4e200663792c34c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/fs-extra/index.d.ts": { - "version": "aca36e2d27783f4bad7fc1786a532ff76024f0fc8575df48bcd9a5eb452fe7e7", - "signature": "aca36e2d27783f4bad7fc1786a532ff76024f0fc8575df48bcd9a5eb452fe7e7", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/highlight.js/index.d.ts": { - "version": "21a2fa3722dc0baba2649e040c3121eb38ce84f5afe35ff1c20276132eaa2f2c", - "signature": "21a2fa3722dc0baba2649e040c3121eb38ce84f5afe35ff1c20276132eaa2f2c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/json-schema/index.d.ts": { - "version": "b2be568d8ce95fcb26eebd04c035d94825655fdf689bf67d799f5ff8cbbb1024", - "signature": "b2be568d8ce95fcb26eebd04c035d94825655fdf689bf67d799f5ff8cbbb1024", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/common.d.ts": { - "version": "3594c022901a1c8993b0f78a3f534cfb81e7b619ed215348f7f6882f3db02abc", - "signature": "3594c022901a1c8993b0f78a3f534cfb81e7b619ed215348f7f6882f3db02abc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/array.d.ts": { - "version": "d03a1ae3d39f757c9f22e4e775b940a98d86bb50ec85529b59e32a17b65c2b90", - "signature": "d03a1ae3d39f757c9f22e4e775b940a98d86bb50ec85529b59e32a17b65c2b90", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/collection.d.ts": { - "version": "0c75b204aed9cf6ff1c7b4bed87a3ece0d9d6fc857a6350c0c95ed0c38c814e8", - "signature": "0c75b204aed9cf6ff1c7b4bed87a3ece0d9d6fc857a6350c0c95ed0c38c814e8", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/date.d.ts": { - "version": "187119ff4f9553676a884e296089e131e8cc01691c546273b1d0089c3533ce42", - "signature": 
"187119ff4f9553676a884e296089e131e8cc01691c546273b1d0089c3533ce42", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/function.d.ts": { - "version": "c9f396e71966bd3a890d8a36a6a497dbf260e9b868158ea7824d4b5421210afe", - "signature": "c9f396e71966bd3a890d8a36a6a497dbf260e9b868158ea7824d4b5421210afe", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/lang.d.ts": { - "version": "509235563ea2b939e1bbe92aae17e71e6a82ceab8f568b45fb4fce7d72523a32", - "signature": "509235563ea2b939e1bbe92aae17e71e6a82ceab8f568b45fb4fce7d72523a32", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/math.d.ts": { - "version": "9364c7566b0be2f7b70ff5285eb34686f83ccb01bda529b82d23b2a844653bfb", - "signature": "9364c7566b0be2f7b70ff5285eb34686f83ccb01bda529b82d23b2a844653bfb", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/number.d.ts": { - "version": "00baffbe8a2f2e4875367479489b5d43b5fc1429ecb4a4cc98cfc3009095f52a", - "signature": "00baffbe8a2f2e4875367479489b5d43b5fc1429ecb4a4cc98cfc3009095f52a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/object.d.ts": { - "version": "c311349ec71bb69399ffc4092853e7d8a86c1ca39ddb4cd129e775c19d985793", - "signature": "c311349ec71bb69399ffc4092853e7d8a86c1ca39ddb4cd129e775c19d985793", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/seq.d.ts": { - "version": "3c92b6dfd43cc1c2485d9eba5ff0b74a19bb8725b692773ef1d66dac48cda4bd", - "signature": "3c92b6dfd43cc1c2485d9eba5ff0b74a19bb8725b692773ef1d66dac48cda4bd", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/string.d.ts": { - "version": "4908e4c00832b26ce77a629de8501b0e23a903c094f9e79a7fec313a15da796a", - "signature": "4908e4c00832b26ce77a629de8501b0e23a903c094f9e79a7fec313a15da796a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/util.d.ts": { - "version": "2630a7cbb597e85d713b7ef47f2946d4280d3d4c02733282770741d40672b1a5", - "signature": "2630a7cbb597e85d713b7ef47f2946d4280d3d4c02733282770741d40672b1a5", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/index.d.ts": { - "version": "0714e2046df66c0e93c3330d30dbc0565b3e8cd3ee302cf99e4ede6220e5fec8", - "signature": "0714e2046df66c0e93c3330d30dbc0565b3e8cd3ee302cf99e4ede6220e5fec8", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/marked/index.d.ts": { - "version": "c08a5e873738f5576ae1ca5810b5ebc30509f05bde56c3a3bbdd75d6c0806e6a", - "signature": "c08a5e873738f5576ae1ca5810b5ebc30509f05bde56c3a3bbdd75d6c0806e6a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/minimist/index.d.ts": { - "version": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", - "signature": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/mocha/index.d.ts": { - "version": "c4c03cf65951d980ba618ae9601d10438730803fc9c8a1f7b34af8739981e205", - "signature": "c4c03cf65951d980ba618ae9601d10438730803fc9c8a1f7b34af8739981e205", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/normalize-package-data/index.d.ts": { - "version": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", - "signature": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/run-parallel/index.d.ts": { - "version": 
"eefea34ce2cdb15ab6678c8c7911c27b2c3da267d7922f192f3d2eb0bf621821", - "signature": "eefea34ce2cdb15ab6678c8c7911c27b2c3da267d7922f192f3d2eb0bf621821", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/shelljs/index.d.ts": { - "version": "b73abc91e3166b1951d302f8008c17e62d32e570e71b2680141f7c3f5d0a990d", - "signature": "b73abc91e3166b1951d302f8008c17e62d32e570e71b2680141f7c3f5d0a990d", - "affectsGlobalScope": false - } - }, - "options": { - "target": 4, - "module": 1, - "moduleResolution": 2, - "strict": true, - "alwaysStrict": true, - "strictFunctionTypes": true, - "strictNullChecks": true, - "strictPropertyInitialization": true, - "forceConsistentCasingInFileNames": true, - "noImplicitAny": true, - "noImplicitReturns": true, - "noImplicitThis": true, - "noFallthroughCasesInSwitch": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "emitDecoratorMetadata": true, - "experimentalDecorators": true, - "downlevelIteration": true, - "composite": true, - "declaration": true, - "declarationMap": true, - "pretty": true, - "rootDir": "./src", - "outDir": "./out", - "configFilePath": "./tsconfig.json" - }, - "referencedMap": { - "../../../node_modules/@types/fs-extra/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/glob/index.d.ts": [ - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/array.d.ts": [ - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/collection.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/common.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - 
"../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/date.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/function.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/lang.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/math.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/number.d.ts": [ - 
"../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/object.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/seq.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/string.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/util.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - 
"../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/index.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts" - ], - "../../../node_modules/@types/node/base.d.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts" - ], - "../../../node_modules/@types/node/child_process.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/cluster.d.ts": [ - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/crypto.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/dgram.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/domain.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/fs.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http2.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/https.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/index.d.ts": [ - "../../../node_modules/@types/node/base.d.ts" - ], - "../../../node_modules/@types/node/inspector.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/net.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - 
"../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/perf_hooks.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts" - ], - "../../../node_modules/@types/node/process.d.ts": [ - "../../../node_modules/@types/node/tty.d.ts" - ], - "../../../node_modules/@types/node/readline.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/repl.d.ts": [ - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/stream.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/tls.d.ts": [ - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/ts3.3/base.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts" - ], - "../../../node_modules/@types/node/ts3.6/base.d.ts": [ - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/wasi.d.ts" - ], - "../../../node_modules/@types/node/tty.d.ts": [ - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/url.d.ts": [ - "../../../node_modules/@types/node/querystring.d.ts" - ], - "../../../node_modules/@types/node/v8.d.ts": [ - 
"../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/worker_threads.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/zlib.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/rimraf/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/shelljs/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.macchiato/out/dirent.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.macchiato/out/index.d.ts": [ - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/stats.d.ts" - ], - "../fs.macchiato/out/stats.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "./src/adapters/fs.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/adapters/fs.ts" - ], - "./src/adapters/fs.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ], - "./src/index.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/rimraf/index.d.ts", - "./src/index.ts" - ], - "./src/index.ts": [ - "./src/adapters/fs.ts", - "./src/providers/async.ts", - "./src/providers/sync.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/async.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/providers/async.ts", - "./src/settings.ts" - ], - "./src/providers/async.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/sync.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/providers/sync.ts", - "./src/settings.ts" - ], - "./src/providers/sync.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/settings.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/adapters/fs.ts", - "./src/settings.ts" - ], - "./src/settings.ts": [ - "./src/adapters/fs.ts" - ], - "./src/types/index.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ] - }, - "exportedModulesMap": { - "../../../node_modules/@types/fs-extra/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/glob/index.d.ts": [ - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/array.d.ts": [ - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - 
"../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/collection.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/common.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/date.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/function.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - 
"../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/lang.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/math.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/number.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/object.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/seq.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - 
"../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/string.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/util.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/index.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts" - ], - "../../../node_modules/@types/node/base.d.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts" - ], - "../../../node_modules/@types/node/child_process.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/cluster.d.ts": [ - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/crypto.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - 
"../../../node_modules/@types/node/dgram.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/domain.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/fs.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http2.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/https.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/index.d.ts": [ - "../../../node_modules/@types/node/base.d.ts" - ], - "../../../node_modules/@types/node/inspector.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/net.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/perf_hooks.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts" - ], - "../../../node_modules/@types/node/process.d.ts": [ - "../../../node_modules/@types/node/tty.d.ts" - ], - "../../../node_modules/@types/node/readline.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/repl.d.ts": [ - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/stream.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/tls.d.ts": [ - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/ts3.3/base.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - 
"../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts" - ], - "../../../node_modules/@types/node/ts3.6/base.d.ts": [ - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/wasi.d.ts" - ], - "../../../node_modules/@types/node/tty.d.ts": [ - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/url.d.ts": [ - "../../../node_modules/@types/node/querystring.d.ts" - ], - "../../../node_modules/@types/node/v8.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/worker_threads.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/zlib.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/rimraf/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/shelljs/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.macchiato/out/dirent.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.macchiato/out/index.d.ts": [ - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/stats.d.ts" - ], - "../fs.macchiato/out/stats.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "./src/adapters/fs.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ], - "./src/index.ts": [ - "./src/adapters/fs.ts", - "./src/providers/async.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/async.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/sync.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/settings.ts": [ - "./src/adapters/fs.ts" 
- ], - "./src/types/index.ts": [ - "../../../node_modules/@types/node/fs.d.ts" - ] - }, - "semanticDiagnosticsPerFile": [ - "../../../node_modules/@types/eslint-visitor-keys/index.d.ts", - "../../../node_modules/@types/fs-extra/index.d.ts", - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/highlight.js/index.d.ts", - "../../../node_modules/@types/json-schema/index.d.ts", - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts", - "../../../node_modules/@types/marked/index.d.ts", - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/minimist/index.d.ts", - "../../../node_modules/@types/mocha/index.d.ts", - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/base.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - 
"../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/wasi.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts", - "../../../node_modules/@types/normalize-package-data/index.d.ts", - "../../../node_modules/@types/rimraf/index.d.ts", - "../../../node_modules/@types/run-parallel/index.d.ts", - "../../../node_modules/@types/shelljs/index.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../../../node_modules/typescript/lib/lib.dom.d.ts", - "../../../node_modules/typescript/lib/lib.dom.iterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.collection.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.core.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.generator.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.iterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.promise.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.proxy.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.reflect.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.symbol.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts", - "../../../node_modules/typescript/lib/lib.es2016.array.include.d.ts", - "../../../node_modules/typescript/lib/lib.es2016.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.full.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.intl.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.object.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.string.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.typedarrays.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.asynciterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.intl.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.promise.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.regexp.d.ts", - "../../../node_modules/typescript/lib/lib.es2020.bigint.d.ts", - "../../../node_modules/typescript/lib/lib.es5.d.ts", - "../../../node_modules/typescript/lib/lib.esnext.intl.d.ts", - "../../../node_modules/typescript/lib/lib.scripthost.d.ts", - "../../../node_modules/typescript/lib/lib.webworker.importscripts.d.ts", - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/index.d.ts", - "../fs.macchiato/out/stats.d.ts", - "../fs.macchiato/out/types.d.ts", - "./src/adapters/fs.spec.ts", - "./src/adapters/fs.ts", - "./src/index.spec.ts", - "./src/index.ts", - "./src/providers/async.spec.ts", - "./src/providers/async.ts", - "./src/providers/sync.spec.ts", - "./src/providers/sync.ts", - "./src/settings.spec.ts", - "./src/settings.ts", - "./src/types/index.ts" - ] - }, - "version": "3.9.7" -} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/.eslintcache b/node_modules/@nodelib/fs.walk/.eslintcache deleted file mode 100644 index 0d52eb1d..00000000 --- a/node_modules/@nodelib/fs.walk/.eslintcache +++ /dev/null @@ -1 +0,0 @@ 
-[{"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\index.spec.ts":"1","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\index.ts":"2","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\async.spec.ts":"3","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\async.ts":"4","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\index.ts":"5","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\stream.spec.ts":"6","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\stream.ts":"7","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\sync.spec.ts":"8","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\sync.ts":"9","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\async.spec.ts":"10","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\async.ts":"11","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\common.spec.ts":"12","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\common.ts":"13","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\reader.spec.ts":"14","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\reader.ts":"15","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\sync.spec.ts":"16","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\sync.ts":"17","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\settings.spec.ts":"18","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\settings.ts":"19","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\tests\\index.ts":"20","D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\types\\index.ts":"21"},{"size":3469,"mtime":1609075886224,"results":"22","hashOfConfig":"23"},{"size":2038,"mtime":1609075886224,"results":"24","hashOfConfig":"23"},{"size":1632,"mtime":1609075886225,"results":"25","hashOfConfig":"23"},{"size":1152,"mtime":1609075886225,"results":"26","hashOfConfig":"23"},{"size":171,"mtime":1609075886225,"results":"27","hashOfConfig":"23"},{"size":2441,"mtime":1609075886226,"results":"28","hashOfConfig":"23"},{"size":829,"mtime":1609075886226,"results":"29","hashOfConfig":"23"},{"size":975,"mtime":1609075886226,"results":"30","hashOfConfig":"23"},{"size":385,"mtime":1609075886227,"results":"31","hashOfConfig":"23"},{"size":6779,"mtime":1609075886227,"results":"32","hashOfConfig":"23"},{"size":3209,"mtime":1609075886227,"results":"33","hashOfConfig":"23"},{"size":3379,"mtime":1609078441945,"results":"34","hashOfConfig":"23"},{"size":852,"mtime":1609078441946,"results":"35","hashOfConfig":"23"},{"size":762,"mtime":1609075886228,"results":"36","hashOfConfig":"23"},{"size":287,"mtime":1609075886228,"results":"37","hashOfConfig":"23"},{"size":4065,"mtime":1609075886229,"results":"38","hashOfConfig":"23"},{"size":1778,"mtime":1609075886229,"results":"39","hashOfConfig":"23"},{"size":1063,"mtime":1609075886229,"results":"40","hashOfConfig":"23"},{"size":1828,"mtime":1609075886230,"results":"41","hashOfConfig":"23"},{"size":972,"mtime":1609075886230,"results":"42","hashOfConfig":"23"},{"size":193,"mtime":1609075886230,"results":"43","hashOfConfig":"23"},{"filePath":"44","messages":"45","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},"1ha7fjd",{"filePath":"46","messages":"47","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"48","messages":"49","errorCount":0,"warn
ingCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"50","messages":"51","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"52","messages":"53","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"54","messages":"55","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"56","messages":"57","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"58","messages":"59","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"60","messages":"61","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"62","messages":"63","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"64","messages":"65","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"66","messages":"67","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"68","messages":"69","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"70","messages":"71","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"72","messages":"73","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"74","messages":"75","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"76","messages":"77","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"78","messages":"79","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"80","messages":"81","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"82","messages":"83","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},{"filePath":"84","messages":"85","errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0},"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\index.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\index.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\async.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\async.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\index.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\stream.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\stream.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\sync.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\providers\\sync.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\async.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\async.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\common.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\common.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\reader.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\reader.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\sync.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\readers\\sync.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs
.walk\\src\\settings.spec.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\settings.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\tests\\index.ts",[],"D:\\work\\OpenSource\\nodelib\\packages\\fs\\fs.walk\\src\\types\\index.ts",[]] \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/@nodelib/fs.walk/out/index.d.ts index 5070b6a0..8864c7bf 100644 --- a/node_modules/@nodelib/fs.walk/out/index.d.ts +++ b/node_modules/@nodelib/fs.walk/out/index.d.ts @@ -1,15 +1,14 @@ -/// -import { Readable } from 'stream'; -import { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; -import { AsyncCallback } from './providers/async'; -import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; -import { Entry } from './types'; -declare function walk(directory: string, callback: AsyncCallback): void; -declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace walk { - function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; -declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; -export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +/// +import type { Readable } from 'stream'; +import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; +import { AsyncCallback } from './providers/async'; +import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; +import type { Entry } from './types'; +declare function walk(directory: string, callback: AsyncCallback): void; +declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace walk { + function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; +declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; +export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/@nodelib/fs.walk/out/index.d.ts.map b/node_modules/@nodelib/fs.walk/out/index.d.ts.map deleted file mode 100644 index 8e8ff950..00000000 --- a/node_modules/@nodelib/fs.walk/out/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAElC,OAAO,EAAE,MAAM,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAEhE,OAAsB,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAGjE,OAAO,QAAQ,EAAE,EAAE,kBAAkB,EAAE,mBAAmB,EAAE,mBAAmB,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAC7G,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAEhC,iBAAS,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAAC;AAChE,iBAAS,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE,iBAAiB,EAAE,OAAO,GAAG,QAAQ,EAAE,QAAQ,EAAE,aAAa,GAAG,IAAI,CAAC;AAWvG,OAAO,WAAW,IAAI,CAAC;IACtB,SAAS,aAAa,CAAC,SAAS,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,QAAQ,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;CACpG;AAED,iBAAS,QAAQ,CAAC,SAAS,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,QAAQ,GAAG,KAAK,EAAE,CAKpF;AAED,iBAAS,UAAU,CAAC,SAAS,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,OAAO,GAAG,QAAQ,GAAG,QAAQ,CAKvF;AAUD,OAAO,EACN,IAAI,EACJ,QAAQ,EACR,UAAU,EACV,QAAQ,EAER,aAAa,EACb,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,OAAO,EACP,kBAAkB,EAClB,mBAAmB,EACnB,mBAAmB,EACnB,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/@nodelib/fs.walk/out/index.js index 664644d7..15207874 100644 --- a/node_modules/@nodelib/fs.walk/out/index.js +++ b/node_modules/@nodelib/fs.walk/out/index.js @@ -1,33 +1,34 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; -const async_1 = require("./providers/async"); -const stream_1 = require("./providers/stream"); -const sync_1 = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function walk(directory, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); - } - new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); -} -exports.walk = walk; -function walkSync(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new sync_1.default(directory, settings); - return provider.read(); -} -exports.walkSync = walkSync; -function walkStream(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new stream_1.default(directory, settings); - return provider.read(); -} -exports.walkStream = walkStream; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function walk(directory, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + return; + } + new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); +} +exports.walk = walk; +function walkSync(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new sync_1.default(directory, settings); + 
return provider.read(); +} +exports.walkSync = walkSync; +function walkStream(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new stream_1.default(directory, settings); + return provider.read(); +} +exports.walkStream = walkStream; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.walk/out/index.spec.d.ts b/node_modules/@nodelib/fs.walk/out/index.spec.d.ts deleted file mode 100644 index 4e9d2bbe..00000000 --- a/node_modules/@nodelib/fs.walk/out/index.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=index.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/index.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/index.spec.d.ts.map deleted file mode 100644 index 47bd6666..00000000 --- a/node_modules/@nodelib/fs.walk/out/index.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.spec.d.ts","sourceRoot":"","sources":["../src/index.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/index.spec.js b/node_modules/@nodelib/fs.walk/out/index.spec.js deleted file mode 100644 index c854df32..00000000 --- a/node_modules/@nodelib/fs.walk/out/index.spec.js +++ /dev/null @@ -1,99 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fs = require("fs"); -const rimraf = require("rimraf"); -const _1 = require("."); -const entryFilter = (entry) => !entry.dirent.isDirectory(); -function streamToPromise(stream) { - const entries = []; - return new Promise((resolve, reject) => { - stream.on('data', (entry) => entries.push(entry)); - stream.once('error', reject); - stream.once('end', () => resolve(entries)); - }); -} -describe('Package', () => { - before(() => { - rimraf.sync('fixtures'); - fs.mkdirSync('fixtures'); - fs.writeFileSync('fixtures/file.txt', ''); - fs.mkdirSync('fixtures/nested'); - fs.writeFileSync('fixtures/nested/file.txt', ''); - }); - after(() => { - rimraf.sync('fixtures'); - }); - describe('.walk', () => { - it('should throw an error for non-exist directory', (done) => { - _1.walk('non-exist-directory', (error, entries) => { - assert.strictEqual(error.code, 'ENOENT'); - assert.strictEqual(entries, undefined); - done(); - }); - }); - it('should work without options or settings', (done) => { - _1.walk('fixtures', (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 3); - done(); - }); - }); - it('should work with options', (done) => { - _1.walk('fixtures', { entryFilter }, (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 2); - done(); - }); - }); - it('should work with settings', (done) => { - const settings = new _1.Settings({ entryFilter }); - _1.walk('fixtures', settings, (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 2); - done(); - }); - }); - }); - describe('.walkStream', () => { - it('should throw an error for non-exist directory', async () => { - const stream = _1.walkStream('non-exist-directory'); - await assert.rejects(() => streamToPromise(stream), (error) => error.code === 'ENOENT'); - }); - it('should work without options or settings', async () => { - const stream = _1.walkStream('fixtures'); - const actual = 
await streamToPromise(stream); - assert.strictEqual(actual.length, 3); - }); - it('should work with options', async () => { - const stream = _1.walkStream('fixtures', { entryFilter }); - const actual = await streamToPromise(stream); - assert.strictEqual(actual.length, 2); - }); - it('should work with settings', async () => { - const settings = new _1.Settings({ entryFilter }); - const stream = _1.walkStream('fixtures', settings); - const actual = await streamToPromise(stream); - assert.strictEqual(actual.length, 2); - }); - }); - describe('.walkSync', () => { - it('should throw an error for non-exist directory', () => { - const matcher = (error) => error.code === 'ENOENT'; - assert.throws(() => _1.walkSync('non-exist-directory'), matcher); - }); - it('should work without options or settings', () => { - const actual = _1.walkSync('fixtures'); - assert.strictEqual(actual.length, 3); - }); - it('should work with options', () => { - const actual = _1.walkSync('fixtures', { entryFilter }); - assert.strictEqual(actual.length, 2); - }); - it('should work with settings', () => { - const settings = new _1.Settings({ entryFilter }); - const actual = _1.walkSync('fixtures', settings); - assert.strictEqual(actual.length, 2); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts index 1f5f1bac..0f6717d7 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts +++ b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts @@ -1,13 +1,12 @@ -import AsyncReader from '../readers/async'; -import Settings from '../settings'; -import { Entry, Errno } from '../types'; -export declare type AsyncCallback = (err: Errno, entries: Entry[]) => void; -export default class AsyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: AsyncReader; - private readonly _storage; - constructor(_root: string, _settings: Settings); - read(callback: AsyncCallback): void; -} -//# sourceMappingURL=async.d.ts.map \ No newline at end of file +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; +export default class AsyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + private readonly _storage; + constructor(_root: string, _settings: Settings); + read(callback: AsyncCallback): void; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts.map b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts.map deleted file mode 100644 index 8da402f2..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"async.d.ts","sourceRoot":"","sources":["../../src/providers/async.ts"],"names":[],"mappings":"AAAA,OAAO,WAAW,MAAM,kBAAkB,CAAC;AAC3C,OAAO,QAAQ,MAAM,aAAa,CAAC;AACnC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAKxC,oBAAY,aAAa,GAAG,CAAC,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,IAAI,CAAC;AAEnE,MAAM,CAAC,OAAO,OAAO,aAAa;IAKrB,OAAO,CAAC,QAAQ,CAAC,KAAK;IAAU,OAAO,CAAC,QAAQ,CAAC,SAAS;IAJtE,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,WAAW,CAA+C;IAEtF,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAyB;gBAErB,KAAK,EAAE,MAAM,EAAmB,SAAS,EAAE,QAAQ;IAEzE,IAAI,CAAC,QAAQ,EAAE,aAAa,GAAG,IAAI;CAe1C"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/@nodelib/fs.walk/out/providers/async.js index 
20e4ab5d..e09da834 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/async.js +++ b/node_modules/@nodelib/fs.walk/out/providers/async.js @@ -1,30 +1,30 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const async_1 = require("../readers/async"); -class AsyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._storage = new Set(); - } - read(callback) { - this._reader.onError((error) => { - callFailureCallback(callback, error); - }); - this._reader.onEntry((entry) => { - this._storage.add(entry); - }); - this._reader.onEnd(() => { - callSuccessCallback(callback, [...this._storage]); - }); - this._reader.read(); - } -} -exports.default = AsyncProvider; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, entries) { - callback(null, entries); -} +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +class AsyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._storage = new Set(); + } + read(callback) { + this._reader.onError((error) => { + callFailureCallback(callback, error); + }); + this._reader.onEntry((entry) => { + this._storage.add(entry); + }); + this._reader.onEnd(() => { + callSuccessCallback(callback, [...this._storage]); + }); + this._reader.read(); + } +} +exports.default = AsyncProvider; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, entries) { + callback(null, entries); +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts b/node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts deleted file mode 100644 index c3f82484..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=async.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts.map deleted file mode 100644 index a67f476b..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"async.spec.d.ts","sourceRoot":"","sources":["../../src/providers/async.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.spec.js b/node_modules/@nodelib/fs.walk/out/providers/async.spec.js deleted file mode 100644 index b947d31a..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.spec.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const sinon = require("sinon"); -const settings_1 = require("../settings"); -const tests = require("../tests"); -const async_1 = require("./async"); -class TestProvider extends async_1.default { - constructor(_root, _settings = new settings_1.default()) { - super(_root, _settings); - this._reader = new tests.TestAsyncReader(); - } - get reader() { - return this._reader; - } -} -describe('Providers → Async', () => { - describe('.read', () => { - it('should call reader function with correct set of arguments', () => { - const provider = new TestProvider('directory'); - 
const fakeCallback = sinon.stub(); - provider.read(fakeCallback); - assert.ok(provider.reader.read.called); - }); - it('should call callback with error for failed launch', () => { - const provider = new TestProvider('directory'); - const fakeCallback = sinon.stub(); - provider.reader.onError.yields(tests.EPERM_ERRNO); - provider.read(fakeCallback); - assert.deepStrictEqual(fakeCallback.args, [[tests.EPERM_ERRNO]]); - }); - it('should push entries to storage and call callback with array of entries', () => { - const provider = new TestProvider('directory'); - const fakeEntry = tests.buildFakeFileEntry(); - const fakeCallback = sinon.stub(); - provider.reader.onEntry.yields(fakeEntry); - provider.reader.onEnd.yields(); - provider.read(fakeCallback); - assert.deepStrictEqual(fakeCallback.args, [[null, [fakeEntry]]]); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts index cbdfb3b9..874f60c5 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts +++ b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts @@ -1,5 +1,4 @@ -import AsyncProvider from './async'; -import StreamProvider from './stream'; -import SyncProvider from './sync'; -export { AsyncProvider, StreamProvider, SyncProvider }; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +import AsyncProvider from './async'; +import StreamProvider from './stream'; +import SyncProvider from './sync'; +export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts.map b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts.map deleted file mode 100644 index b66c2b1f..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/providers/index.ts"],"names":[],"mappings":"AAAA,OAAO,aAAa,MAAM,SAAS,CAAC;AACpC,OAAO,cAAc,MAAM,UAAU,CAAC;AACtC,OAAO,YAAY,MAAM,QAAQ,CAAC;AAElC,OAAO,EACN,aAAa,EACb,cAAc,EACd,YAAY,EACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/@nodelib/fs.walk/out/providers/index.js index 6d632d6a..4c2529ce 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/index.js +++ b/node_modules/@nodelib/fs.walk/out/providers/index.js @@ -1,9 +1,9 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; -const async_1 = require("./async"); -exports.AsyncProvider = async_1.default; -const stream_1 = require("./stream"); -exports.StreamProvider = stream_1.default; -const sync_1 = require("./sync"); -exports.SyncProvider = sync_1.default; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; +const async_1 = require("./async"); +exports.AsyncProvider = async_1.default; +const stream_1 = require("./stream"); +exports.StreamProvider = stream_1.default; +const sync_1 = require("./sync"); +exports.SyncProvider = sync_1.default; diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts index 810111d4..294185f8 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts +++ b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts @@ -1,13 +1,12 @@ -/// -import { Readable } from 'stream'; -import AsyncReader 
from '../readers/async'; -import Settings from '../settings'; -export default class StreamProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: AsyncReader; - protected readonly _stream: Readable; - constructor(_root: string, _settings: Settings); - read(): Readable; -} -//# sourceMappingURL=stream.d.ts.map \ No newline at end of file +/// +import { Readable } from 'stream'; +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +export default class StreamProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + protected readonly _stream: Readable; + constructor(_root: string, _settings: Settings); + read(): Readable; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts.map b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts.map deleted file mode 100644 index 47ad13ea..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../src/providers/stream.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAClC,OAAO,WAAW,MAAM,kBAAkB,CAAC;AAC3C,OAAO,QAAQ,MAAM,aAAa,CAAC;AAEnC,MAAM,CAAC,OAAO,OAAO,cAAc;IAYtB,OAAO,CAAC,QAAQ,CAAC,KAAK;IAAU,OAAO,CAAC,QAAQ,CAAC,SAAS;IAXtE,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,WAAW,CAA+C;IACtF,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,QAAQ,CAQjC;gBAE0B,KAAK,EAAE,MAAM,EAAmB,SAAS,EAAE,QAAQ;IAEzE,IAAI,IAAI,QAAQ;CAiBvB"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/@nodelib/fs.walk/out/providers/stream.js index 3bd49493..51298b0f 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.js +++ b/node_modules/@nodelib/fs.walk/out/providers/stream.js @@ -1,34 +1,34 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const async_1 = require("../readers/async"); -class StreamProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._stream = new stream_1.Readable({ - objectMode: true, - read: () => { }, - destroy: () => { - if (!this._reader.isDestroyed) { - this._reader.destroy(); - } - } - }); - } - read() { - this._reader.onError((error) => { - this._stream.emit('error', error); - }); - this._reader.onEntry((entry) => { - this._stream.push(entry); - }); - this._reader.onEnd(() => { - this._stream.push(null); - }); - this._reader.read(); - return this._stream; - } -} -exports.default = StreamProvider; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const async_1 = require("../readers/async"); +class StreamProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._stream = new stream_1.Readable({ + objectMode: true, + read: () => { }, + destroy: () => { + if (!this._reader.isDestroyed) { + this._reader.destroy(); + } + } + }); + } + read() { + this._reader.onError((error) => { + this._stream.emit('error', error); + }); + this._reader.onEntry((entry) => { + this._stream.push(entry); + }); + this._reader.onEnd(() => { + this._stream.push(null); + }); + this._reader.read(); + return this._stream; + } +} +exports.default = StreamProvider; diff --git 
a/node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts b/node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts deleted file mode 100644 index b45ee18e..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=stream.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts.map deleted file mode 100644 index d16ea614..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"stream.spec.d.ts","sourceRoot":"","sources":["../../src/providers/stream.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.spec.js b/node_modules/@nodelib/fs.walk/out/providers/stream.spec.js deleted file mode 100644 index 79a6b29f..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.spec.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const stream_1 = require("stream"); -const sinon = require("sinon"); -const settings_1 = require("../settings"); -const tests = require("../tests"); -const stream_2 = require("./stream"); -class TestProvider extends stream_2.default { - constructor(_root, _settings = new settings_1.default()) { - super(_root, _settings); - this._reader = new tests.TestAsyncReader(); - this._stream.emit = sinon.stub(); - this._stream.push = sinon.stub(); - } - get reader() { - return this._reader; - } - get stream() { - return this._stream; - } -} -describe('Providers → Stream', () => { - describe('.read', () => { - it('should return stream', () => { - const provider = new TestProvider('directory'); - const stream = provider.read(); - assert.ok(stream instanceof stream_1.Readable); - }); - it('should call reader function with correct set of arguments', () => { - const provider = new TestProvider('directory'); - provider.read(); - assert.ok(provider.reader.read.called); - }); - it('should re-emit the "error" event from reader', () => { - const provider = new TestProvider('directory'); - provider.reader.onError.yields(tests.EPERM_ERRNO); - provider.read(); - assert.deepStrictEqual(provider.stream.emit.args, [['error', tests.EPERM_ERRNO]]); - }); - it('should call the "push" method with entry value for the "entry" event from reader', () => { - const provider = new TestProvider('directory'); - const fakeEntry = tests.buildFakeFileEntry(); - provider.reader.onEntry.yields(fakeEntry); - provider.read(); - assert.deepStrictEqual(provider.stream.push.args, [[fakeEntry]]); - }); - it('should call the "push" method with "null" value for the "end" event from reader', () => { - const provider = new TestProvider('directory'); - provider.reader.onEnd.yields(); - provider.read(); - assert.deepStrictEqual(provider.stream.push.args, [[null]]); - }); - it('should do not destroy reader when it is already destroyed', () => { - const provider = new TestProvider('directory'); - const stream = provider.read(); - stream.destroy(); - assert.ok(stream.destroyed); - assert.doesNotThrow(() => stream.destroy()); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts index 9570fd11..551c42e4 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts +++ 
b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts @@ -1,11 +1,10 @@ -import SyncReader from '../readers/sync'; -import Settings from '../settings'; -import { Entry } from '../types'; -export default class SyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: SyncReader; - constructor(_root: string, _settings: Settings); - read(): Entry[]; -} -//# sourceMappingURL=sync.d.ts.map \ No newline at end of file +import SyncReader from '../readers/sync'; +import type Settings from '../settings'; +import type { Entry } from '../types'; +export default class SyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: SyncReader; + constructor(_root: string, _settings: Settings); + read(): Entry[]; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts.map b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts.map deleted file mode 100644 index 8d921973..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"sync.d.ts","sourceRoot":"","sources":["../../src/providers/sync.ts"],"names":[],"mappings":"AAAA,OAAO,UAAU,MAAM,iBAAiB,CAAC;AACzC,OAAO,QAAQ,MAAM,aAAa,CAAC;AACnC,OAAO,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAEjC,MAAM,CAAC,OAAO,OAAO,YAAY;IAGpB,OAAO,CAAC,QAAQ,CAAC,KAAK;IAAU,OAAO,CAAC,QAAQ,CAAC,SAAS;IAFtE,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,UAAU,CAA8C;gBAEvD,KAAK,EAAE,MAAM,EAAmB,SAAS,EAAE,QAAQ;IAEzE,IAAI,IAAI,KAAK,EAAE;CAGtB"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/@nodelib/fs.walk/out/providers/sync.js index fef1d8d8..faab6ca2 100644 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.js +++ b/node_modules/@nodelib/fs.walk/out/providers/sync.js @@ -1,14 +1,14 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const sync_1 = require("../readers/sync"); -class SyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new sync_1.default(this._root, this._settings); - } - read() { - return this._reader.read(); - } -} -exports.default = SyncProvider; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +class SyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new sync_1.default(this._root, this._settings); + } + read() { + return this._reader.read(); + } +} +exports.default = SyncProvider; diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts b/node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts deleted file mode 100644 index 5167ab33..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=sync.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts.map deleted file mode 100644 index aae94987..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"sync.spec.d.ts","sourceRoot":"","sources":["../../src/providers/sync.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.spec.js b/node_modules/@nodelib/fs.walk/out/providers/sync.spec.js deleted file mode 100644 index 
1fe94c5d..00000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.spec.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const settings_1 = require("../settings"); -const tests = require("../tests"); -const sync_1 = require("./sync"); -class TestProvider extends sync_1.default { - constructor(_root, _settings = new settings_1.default()) { - super(_root, _settings); - this._reader = new tests.TestSyncReader(); - } - get reader() { - return this._reader; - } -} -describe('Providers → Sync', () => { - describe('.read', () => { - it('should call reader function with correct set of arguments and got result', () => { - const provider = new TestProvider('directory'); - const fakeEntry = tests.buildFakeFileEntry(); - provider.reader.read.returns([fakeEntry]); - const actual = provider.read(); - assert.deepStrictEqual(actual, [fakeEntry]); - assert.ok(provider.reader.read.called); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts index 80f9a8ca..9acf4e6c 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts +++ b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts @@ -1,31 +1,30 @@ -/// -import { EventEmitter } from 'events'; -import * as fsScandir from '@nodelib/fs.scandir'; -import Settings from '../settings'; -import { Entry, Errno } from '../types'; -import Reader from './reader'; -declare type EntryEventCallback = (entry: Entry) => void; -declare type ErrorEventCallback = (error: Errno) => void; -declare type EndEventCallback = () => void; -export default class AsyncReader extends Reader { - protected readonly _settings: Settings; - protected readonly _scandir: typeof fsScandir.scandir; - protected readonly _emitter: EventEmitter; - private readonly _queue; - private _isFatalError; - private _isDestroyed; - constructor(_root: string, _settings: Settings); - read(): EventEmitter; - get isDestroyed(): boolean; - destroy(): void; - onEntry(callback: EntryEventCallback): void; - onError(callback: ErrorEventCallback): void; - onEnd(callback: EndEventCallback): void; - private _pushToQueue; - private _worker; - private _handleError; - private _handleEntry; - private _emitEntry; -} -export {}; -//# sourceMappingURL=async.d.ts.map \ No newline at end of file +/// +import { EventEmitter } from 'events'; +import * as fsScandir from '@nodelib/fs.scandir'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +import Reader from './reader'; +declare type EntryEventCallback = (entry: Entry) => void; +declare type ErrorEventCallback = (error: Errno) => void; +declare type EndEventCallback = () => void; +export default class AsyncReader extends Reader { + protected readonly _settings: Settings; + protected readonly _scandir: typeof fsScandir.scandir; + protected readonly _emitter: EventEmitter; + private readonly _queue; + private _isFatalError; + private _isDestroyed; + constructor(_root: string, _settings: Settings); + read(): EventEmitter; + get isDestroyed(): boolean; + destroy(): void; + onEntry(callback: EntryEventCallback): void; + onError(callback: ErrorEventCallback): void; + onEnd(callback: EndEventCallback): void; + private _pushToQueue; + private _worker; + private _handleError; + private _handleEntry; + private _emitEntry; +} +export {}; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts.map 
b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts.map deleted file mode 100644 index 4e0defa7..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"async.d.ts","sourceRoot":"","sources":["../../src/readers/async.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAEtC,OAAO,KAAK,SAAS,MAAM,qBAAqB,CAAC;AAGjD,OAAO,QAAQ,MAAM,aAAa,CAAC;AACnC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAa,MAAM,UAAU,CAAC;AAEnD,OAAO,MAAM,MAAM,UAAU,CAAC;AAE9B,aAAK,kBAAkB,GAAG,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;AACjD,aAAK,kBAAkB,GAAG,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;AACjD,aAAK,gBAAgB,GAAG,MAAM,IAAI,CAAC;AAEnC,MAAM,CAAC,OAAO,OAAO,WAAY,SAAQ,MAAM;IAQnB,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,QAAQ;IAPjE,SAAS,CAAC,QAAQ,CAAC,QAAQ,EAAE,OAAO,SAAS,CAAC,OAAO,CAAqB;IAC1E,SAAS,CAAC,QAAQ,CAAC,QAAQ,EAAE,YAAY,CAAsB;IAE/D,OAAO,CAAC,QAAQ,CAAC,MAAM,CAA2E;IAClG,OAAO,CAAC,aAAa,CAAkB;IACvC,OAAO,CAAC,YAAY,CAAkB;gBAE1B,KAAK,EAAE,MAAM,EAAqB,SAAS,EAAE,QAAQ;IAU1D,IAAI,IAAI,YAAY;IAW3B,IAAW,WAAW,IAAI,OAAO,CAEhC;IAEM,OAAO,IAAI,IAAI;IASf,OAAO,CAAC,QAAQ,EAAE,kBAAkB,GAAG,IAAI;IAI3C,OAAO,CAAC,QAAQ,EAAE,kBAAkB,GAAG,IAAI;IAI3C,KAAK,CAAC,QAAQ,EAAE,gBAAgB,GAAG,IAAI;IAI9C,OAAO,CAAC,YAAY;IAUpB,OAAO,CAAC,OAAO;IAcf,OAAO,CAAC,YAAY;IAUpB,OAAO,CAAC,YAAY;IAoBpB,OAAO,CAAC,UAAU;CAGlB"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/@nodelib/fs.walk/out/readers/async.js index 93e6670b..5a9be5bb 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/async.js +++ b/node_modules/@nodelib/fs.walk/out/readers/async.js @@ -1,96 +1,97 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const events_1 = require("events"); -const fsScandir = require("@nodelib/fs.scandir"); -const fastq = require("fastq"); -const common = require("./common"); -const reader_1 = require("./reader"); -class AsyncReader extends reader_1.default { - constructor(_root, _settings) { - super(_root, _settings); - this._settings = _settings; - this._scandir = fsScandir.scandir; - this._emitter = new events_1.EventEmitter(); - this._queue = fastq(this._worker.bind(this), this._settings.concurrency); - this._isFatalError = false; - this._isDestroyed = false; - this._queue.drain = () => { - if (!this._isFatalError) { - this._emitter.emit('end'); - } - }; - } - read() { - this._isFatalError = false; - this._isDestroyed = false; - setImmediate(() => { - this._pushToQueue(this._root, this._settings.basePath); - }); - return this._emitter; - } - get isDestroyed() { - return this._isDestroyed; - } - destroy() { - if (this._isDestroyed) { - throw new Error('The reader is already destroyed'); - } - this._isDestroyed = true; - this._queue.killAndDrain(); - } - onEntry(callback) { - this._emitter.on('entry', callback); - } - onError(callback) { - this._emitter.once('error', callback); - } - onEnd(callback) { - this._emitter.once('end', callback); - } - _pushToQueue(directory, base) { - const queueItem = { directory, base }; - this._queue.push(queueItem, (error) => { - if (error !== null) { - this._handleError(error); - } - }); - } - _worker(item, done) { - this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { - if (error !== null) { - return done(error, undefined); - } - for (const entry of entries) { - this._handleEntry(entry, item.base); - } - done(null, undefined); - }); - } - _handleError(error) { - if (this._isDestroyed || !common.isFatalError(this._settings, error)) { - return; - } - this._isFatalError = true; - 
this._isDestroyed = true; - this._emitter.emit('error', error); - } - _handleEntry(entry, base) { - if (this._isDestroyed || this._isFatalError) { - return; - } - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._emitEntry(entry); - } - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); - } - } - _emitEntry(entry) { - this._emitter.emit('entry', entry); - } -} -exports.default = AsyncReader; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const events_1 = require("events"); +const fsScandir = require("@nodelib/fs.scandir"); +const fastq = require("fastq"); +const common = require("./common"); +const reader_1 = require("./reader"); +class AsyncReader extends reader_1.default { + constructor(_root, _settings) { + super(_root, _settings); + this._settings = _settings; + this._scandir = fsScandir.scandir; + this._emitter = new events_1.EventEmitter(); + this._queue = fastq(this._worker.bind(this), this._settings.concurrency); + this._isFatalError = false; + this._isDestroyed = false; + this._queue.drain = () => { + if (!this._isFatalError) { + this._emitter.emit('end'); + } + }; + } + read() { + this._isFatalError = false; + this._isDestroyed = false; + setImmediate(() => { + this._pushToQueue(this._root, this._settings.basePath); + }); + return this._emitter; + } + get isDestroyed() { + return this._isDestroyed; + } + destroy() { + if (this._isDestroyed) { + throw new Error('The reader is already destroyed'); + } + this._isDestroyed = true; + this._queue.killAndDrain(); + } + onEntry(callback) { + this._emitter.on('entry', callback); + } + onError(callback) { + this._emitter.once('error', callback); + } + onEnd(callback) { + this._emitter.once('end', callback); + } + _pushToQueue(directory, base) { + const queueItem = { directory, base }; + this._queue.push(queueItem, (error) => { + if (error !== null) { + this._handleError(error); + } + }); + } + _worker(item, done) { + this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { + if (error !== null) { + done(error, undefined); + return; + } + for (const entry of entries) { + this._handleEntry(entry, item.base); + } + done(null, undefined); + }); + } + _handleError(error) { + if (this._isDestroyed || !common.isFatalError(this._settings, error)) { + return; + } + this._isFatalError = true; + this._isDestroyed = true; + this._emitter.emit('error', error); + } + _handleEntry(entry, base) { + if (this._isDestroyed || this._isFatalError) { + return; + } + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._emitEntry(entry); + } + if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, entry.path); + } + } + _emitEntry(entry) { + this._emitter.emit('entry', entry); + } +} +exports.default = AsyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts b/node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts deleted file mode 100644 index c3f82484..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; 
-//# sourceMappingURL=async.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts.map deleted file mode 100644 index 94379426..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"async.spec.d.ts","sourceRoot":"","sources":["../../src/readers/async.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.spec.js b/node_modules/@nodelib/fs.walk/out/readers/async.spec.js deleted file mode 100644 index 32fd7859..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.spec.js +++ /dev/null @@ -1,165 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); -const sinon = require("sinon"); -const settings_1 = require("../settings"); -const tests = require("../tests"); -const async_1 = require("./async"); -class TestReader extends async_1.default { - constructor(_root, _settings = new settings_1.default()) { - super(_root, _settings); - this._scandir = sinon.stub(); - } - get scandir() { - return this._scandir; - } -} -describe('Readers → Async', () => { - describe('.read', () => { - it('should emit "error" event when the first call of scandir is broken', (done) => { - const reader = new TestReader('non-exist-directory'); - reader.scandir.yields(tests.EPERM_ERRNO); - reader.onError((error) => { - assert.ok(error); - done(); - }); - reader.read(); - }); - it('should emit "end" event when the first call of scandir is broken but this error can be suppressed', (done) => { - const settings = new settings_1.default({ - errorFilter: (error) => error.code === 'EPERM' - }); - const reader = new TestReader('non-exist-directory', settings); - reader.scandir.yields(tests.EPERM_ERRNO); - reader.onEnd(() => { - done(); - }); - reader.read(); - }); - it('should do not emit events after first broken scandir call', (done) => { - const reader = new TestReader('directory'); - const firstFakeDirectoryEntry = tests.buildFakeDirectoryEntry({ name: 'a', path: 'directory/a' }); - const secondFakeDirectoryEntry = tests.buildFakeDirectoryEntry({ name: 'b', path: 'directory/b' }); - reader.scandir.onFirstCall().yields(null, [firstFakeDirectoryEntry, secondFakeDirectoryEntry]); - reader.scandir.onSecondCall().yieldsAsync(tests.EPERM_ERRNO); - reader.scandir.onThirdCall().yieldsAsync(tests.EPERM_ERRNO); - /** - * If the behavior is broken, then a third scandir call will trigger an unhandled error. 
- */ - reader.onError((error) => { - assert.ok(error); - done(); - }); - reader.read(); - }); - it('should return entries', (done) => { - const reader = new TestReader('directory'); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - const entries = []; - reader.onEntry((entry) => entries.push(entry)); - reader.onEnd(() => { - assert.deepStrictEqual(entries, [fakeDirectoryEntry, fakeFileEntry]); - done(); - }); - reader.read(); - }); - it('should push to results only directories', (done) => { - const settings = new settings_1.default({ entryFilter: (entry) => !entry.dirent.isFile() }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - const entries = []; - reader.onEntry((entry) => entries.push(entry)); - reader.onEnd(() => { - assert.deepStrictEqual(entries, [fakeDirectoryEntry]); - done(); - }); - reader.read(); - }); - it('should do not read root directory', (done) => { - const settings = new settings_1.default({ deepFilter: () => false }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - const entries = []; - reader.onEntry((entry) => entries.push(entry)); - reader.onEnd(() => { - assert.deepStrictEqual(entries, [fakeDirectoryEntry]); - done(); - }); - reader.read(); - }); - it('should set base path to entry when the `basePath` option is exist', (done) => { - const settings = new settings_1.default({ basePath: 'base' }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - const entries = []; - reader.onEntry((entry) => entries.push(entry)); - reader.onEnd(() => { - assert.strictEqual(entries[0].path, path.join('base', fakeDirectoryEntry.name)); - assert.strictEqual(entries[1].path, path.join('base', 'fake', fakeFileEntry.name)); - done(); - }); - reader.read(); - }); - it('should set base path to entry when the `basePath` option is exist and value is an empty string', (done) => { - const settings = new settings_1.default({ basePath: '' }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - const entries = []; - reader.onEntry((entry) => entries.push(entry)); - reader.onEnd(() => { - assert.strictEqual(entries[0].path, path.join(fakeDirectoryEntry.name)); - assert.strictEqual(entries[1].path, path.join('fake', fakeFileEntry.name)); - done(); - }); - reader.read(); - }); - }); - describe('.destroy', () => { - it('should do not emit entries after destroy', (done) => { - const reader = new 
TestReader('directory'); - const firstFakeDirectoryEntry = tests.buildFakeDirectoryEntry({ name: 'a', path: 'directory/a' }); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().yields(null, [firstFakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - reader.onEntry((entry) => { - if (entry.name === 'a') { - reader.destroy(); - } - else { - assert.fail('should do not emit entries after destroy'); - } - }); - reader.onEnd(() => { - done(); - }); - reader.read(); - }); - it('should mark stream as "destroyed" after first destroy', () => { - const reader = new TestReader('directory'); - reader.destroy(); - assert.ok(reader.isDestroyed); - }); - it('should throw an error when trying to destroy reader twice', () => { - const reader = new TestReader('directory'); - const expectedErrorMessageRe = /The reader is already destroyed/; - reader.destroy(); - assert.throws(() => reader.destroy(), expectedErrorMessageRe); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts index 93bbae32..5985f97c 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts +++ b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts @@ -1,7 +1,7 @@ -import Settings, { FilterFunction } from '../settings'; -import { Errno } from '../types'; -export declare function isFatalError(settings: Settings, error: Errno): boolean; -export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; -export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; -export declare function joinPathSegments(a: string, b: string, separator: string): string; -//# sourceMappingURL=common.d.ts.map \ No newline at end of file +import type { FilterFunction } from '../settings'; +import type Settings from '../settings'; +import type { Errno } from '../types'; +export declare function isFatalError(settings: Settings, error: Errno): boolean; +export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; +export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts.map b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts.map deleted file mode 100644 index 8a5b96ea..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"common.d.ts","sourceRoot":"","sources":["../../src/readers/common.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,EAAE,EAAE,cAAc,EAAE,MAAM,aAAa,CAAC;AACvD,OAAO,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAEjC,wBAAgB,YAAY,CAAC,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,GAAG,OAAO,CAMtE;AAED,wBAAgB,eAAe,CAAC,CAAC,EAAE,MAAM,EAAE,cAAc,CAAC,CAAC,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,GAAG,OAAO,CAEtF;AAED,wBAAgB,2BAA2B,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAEvF;AAED,wBAAgB,gBAAgB,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAahF"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/@nodelib/fs.walk/out/readers/common.js index 8294d591..a93572f4 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/common.js +++ b/node_modules/@nodelib/fs.walk/out/readers/common.js @@ -1,31 +1,31 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); 
-exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; -function isFatalError(settings, error) { - if (settings.errorFilter === null) { - return true; - } - return !settings.errorFilter(error); -} -exports.isFatalError = isFatalError; -function isAppliedFilter(filter, value) { - return filter === null || filter(value); -} -exports.isAppliedFilter = isAppliedFilter; -function replacePathSegmentSeparator(filepath, separator) { - return filepath.split(/[/\\]/).join(separator); -} -exports.replacePathSegmentSeparator = replacePathSegmentSeparator; -function joinPathSegments(a, b, separator) { - if (a === '') { - return b; - } - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). - */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; +function isFatalError(settings, error) { + if (settings.errorFilter === null) { + return true; + } + return !settings.errorFilter(error); +} +exports.isFatalError = isFatalError; +function isAppliedFilter(filter, value) { + return filter === null || filter(value); +} +exports.isAppliedFilter = isAppliedFilter; +function replacePathSegmentSeparator(filepath, separator) { + return filepath.split(/[/\\]/).join(separator); +} +exports.replacePathSegmentSeparator = replacePathSegmentSeparator; +function joinPathSegments(a, b, separator) { + if (a === '') { + return b; + } + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
+ */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts b/node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts deleted file mode 100644 index c5ccf64b..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=common.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts.map deleted file mode 100644 index 14213d94..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"common.spec.d.ts","sourceRoot":"","sources":["../../src/readers/common.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.spec.js b/node_modules/@nodelib/fs.walk/out/readers/common.spec.js deleted file mode 100644 index 93734aaa..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.spec.js +++ /dev/null @@ -1,85 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); -const settings_1 = require("../settings"); -const tests = require("../tests"); -const common = require("./common"); -describe('Readers → Common', () => { - describe('.isFatalError', () => { - it('should return true when filter is not defined', () => { - const settings = new settings_1.default(); - const actual = common.isFatalError(settings, tests.EPERM_ERRNO); - assert.ok(actual); - }); - it('should return true when the error cannot be suppressed', () => { - const settings = new settings_1.default({ - errorFilter: (error) => error.code === 'ENOENT' - }); - const actual = common.isFatalError(settings, tests.EPERM_ERRNO); - assert.ok(actual); - }); - it('should return false when the error can be suppressed', () => { - const settings = new settings_1.default({ - errorFilter: (error) => error.code === 'EPERM' - }); - const actual = common.isFatalError(settings, tests.EPERM_ERRNO); - assert.ok(!actual); - }); - }); - describe('.isAppliedFilter', () => { - it('should return true when the filter is not defined', () => { - const settings = new settings_1.default(); - const entry = tests.buildFakeFileEntry(); - const actual = common.isAppliedFilter(settings.entryFilter, entry); - assert.ok(actual); - }); - it('should return true when the entry will be applied', () => { - const settings = new settings_1.default({ - entryFilter: (entry) => entry.name === 'fake.txt' - }); - const fakeEntry = tests.buildFakeFileEntry(); - const actual = common.isAppliedFilter(settings.entryFilter, fakeEntry); - assert.ok(actual); - }); - it('should return false when the entry will be skipped', () => { - const settings = new settings_1.default({ - entryFilter: (entry) => entry.name !== 'fake.txt' - }); - const fakeEntry = tests.buildFakeFileEntry(); - const actual = common.isAppliedFilter(settings.entryFilter, fakeEntry); - assert.ok(!actual); - }); - }); - describe('.replacePathSegmentSeparator', () => { - it('should replace path segment separator', () => { - const filepath = path.join('directory', 'file.txt'); - const expected = 'directory_file.txt'; - const actual = common.replacePathSegmentSeparator(filepath, '_'); - assert.strictEqual(actual, expected); - }); - }); - 
describe('.joinPathSegments', () => { - it('should return concatenated string', () => { - const expected = 'a&b'; - const actual = common.joinPathSegments('a', 'b', '&'); - assert.strictEqual(actual, expected); - }); - it('should return second part of path when the first path is an empty string', () => { - const expected = 'b'; - const actual = common.joinPathSegments('', 'b', '&'); - assert.strictEqual(actual, expected); - }); - it('should return correct string when the first segment ens with the separator symbol', () => { - // Unix - assert.strictEqual(common.joinPathSegments('/', 'a', '/'), '/a'); - assert.strictEqual(common.joinPathSegments('//', 'a', '/'), '//a'); - assert.strictEqual(common.joinPathSegments('/a/', 'b', '/'), '/a/b'); - // Windows - assert.strictEqual(common.joinPathSegments('C:/', 'Users', '/'), 'C:/Users'); - assert.strictEqual(common.joinPathSegments('C:\\', 'Users', '\\'), 'C:\\Users'); - assert.strictEqual(common.joinPathSegments('//?/C:/', 'Users', '/'), '//?/C:/Users'); - assert.strictEqual(common.joinPathSegments('\\\\?\\C:\\', 'Users', '\\'), '\\\\?\\C:\\Users'); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts index 688968f8..e1f383b2 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts @@ -1,7 +1,6 @@ -import Settings from '../settings'; -export default class Reader { - protected readonly _root: string; - protected readonly _settings: Settings; - constructor(_root: string, _settings: Settings); -} -//# sourceMappingURL=reader.d.ts.map \ No newline at end of file +import type Settings from '../settings'; +export default class Reader { + protected readonly _root: string; + protected readonly _settings: Settings; + constructor(_root: string, _settings: Settings); +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts.map b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts.map deleted file mode 100644 index 4b59981c..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"reader.d.ts","sourceRoot":"","sources":["../../src/readers/reader.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,aAAa,CAAC;AAGnC,MAAM,CAAC,OAAO,OAAO,MAAM;IACd,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,MAAM;IAAE,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,QAAQ;gBAArD,KAAK,EAAE,MAAM,EAAqB,SAAS,EAAE,QAAQ;CAGpF"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/@nodelib/fs.walk/out/readers/reader.js index 25e7997f..782f07cb 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.js +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.js @@ -1,11 +1,11 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const common = require("./common"); -class Reader { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); - } -} -exports.default = Reader; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const common = require("./common"); +class Reader { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); + } +} +exports.default = Reader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts 
b/node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts deleted file mode 100644 index eda56bb1..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=reader.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts.map deleted file mode 100644 index bbe7c4cf..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"reader.spec.d.ts","sourceRoot":"","sources":["../../src/readers/reader.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.spec.js b/node_modules/@nodelib/fs.walk/out/readers/reader.spec.js deleted file mode 100644 index a805ca88..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.spec.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); -const settings_1 = require("../settings"); -const reader_1 = require("./reader"); -class TestReader extends reader_1.default { - get root() { - return this._root; - } -} -function getReader(root, options = {}) { - return new TestReader(root, new settings_1.default(options)); -} -describe('Readers → Reader', () => { - describe('Constructor', () => { - it('should return root path with replaced path segment separators', () => { - const root = path.join('directory', 'file.txt'); - const reader = getReader(root, { pathSegmentSeparator: '_' }); - const expected = 'directory_file.txt'; - const actual = reader.root; - assert.strictEqual(actual, expected); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts index b0bb2675..af410335 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts @@ -1,16 +1,15 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import { Entry } from '../types'; -import Reader from './reader'; -export default class SyncReader extends Reader { - protected readonly _scandir: typeof fsScandir.scandirSync; - private readonly _storage; - private readonly _queue; - read(): Entry[]; - private _pushToQueue; - private _handleQueue; - private _handleDirectory; - private _handleError; - private _handleEntry; - private _pushToStorage; -} -//# sourceMappingURL=sync.d.ts.map \ No newline at end of file +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry } from '../types'; +import Reader from './reader'; +export default class SyncReader extends Reader { + protected readonly _scandir: typeof fsScandir.scandirSync; + private readonly _storage; + private readonly _queue; + read(): Entry[]; + private _pushToQueue; + private _handleQueue; + private _handleDirectory; + private _handleError; + private _handleEntry; + private _pushToStorage; +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts.map b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts.map deleted file mode 100644 index f6d327f5..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"sync.d.ts","sourceRoot":"","sources":["../../src/readers/sync.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,qBAAqB,CAAC;AAEjD,OAAO,EAAE,KAAK,EAAoB,MAAM,UAAU,CAAC;AAEnD,OAAO,MAAM,MAAM,UAAU,CAAC;AAE9B,MAAM,CAAC,OAAO,OAAO,UAAW,SAAQ,MAAM;IAC7C,SAAS,CAAC,QAAQ,CAAC,QAAQ,EAAE,OAAO,SAAS,CAAC,WAAW,CAAyB;IAElF,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAyB;IAClD,OAAO,CAAC,QAAQ,CAAC,MAAM,CAA6B;IAE7C,IAAI,IAAI,KAAK,EAAE;IAOtB,OAAO,CAAC,YAAY;IAIpB,OAAO,CAAC,YAAY;IAMpB,OAAO,CAAC,gBAAgB;IAYxB,OAAO,CAAC,YAAY;IAQpB,OAAO,CAAC,YAAY;IAgBpB,OAAO,CAAC,cAAc;CAGtB"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/@nodelib/fs.walk/out/readers/sync.js index d0f06914..e99df328 100644 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.js +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.js @@ -1,59 +1,59 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsScandir = require("@nodelib/fs.scandir"); -const common = require("./common"); -const reader_1 = require("./reader"); -class SyncReader extends reader_1.default { - constructor() { - super(...arguments); - this._scandir = fsScandir.scandirSync; - this._storage = new Set(); - this._queue = new Set(); - } - read() { - this._pushToQueue(this._root, this._settings.basePath); - this._handleQueue(); - return [...this._storage]; - } - _pushToQueue(directory, base) { - this._queue.add({ directory, base }); - } - _handleQueue() { - for (const item of this._queue.values()) { - this._handleDirectory(item.directory, item.base); - } - } - _handleDirectory(directory, base) { - try { - const entries = this._scandir(directory, this._settings.fsScandirSettings); - for (const entry of entries) { - this._handleEntry(entry, base); - } - } - catch (error) { - this._handleError(error); - } - } - _handleError(error) { - if (!common.isFatalError(this._settings, error)) { - return; - } - throw error; - } - _handleEntry(entry, base) { - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._pushToStorage(entry); - } - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); - } - } - _pushToStorage(entry) { - this._storage.add(entry); - } -} -exports.default = SyncReader; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsScandir = require("@nodelib/fs.scandir"); +const common = require("./common"); +const reader_1 = require("./reader"); +class SyncReader extends reader_1.default { + constructor() { + super(...arguments); + this._scandir = fsScandir.scandirSync; + this._storage = new Set(); + this._queue = new Set(); + } + read() { + this._pushToQueue(this._root, this._settings.basePath); + this._handleQueue(); + return [...this._storage]; + } + _pushToQueue(directory, base) { + this._queue.add({ directory, base }); + } + _handleQueue() { + for (const item of this._queue.values()) { + this._handleDirectory(item.directory, item.base); + } + } + _handleDirectory(directory, base) { + try { + const entries = this._scandir(directory, this._settings.fsScandirSettings); + for (const entry of entries) { + this._handleEntry(entry, base); + } + } + catch (error) { + this._handleError(error); + } + } + _handleError(error) { + if (!common.isFatalError(this._settings, error)) { + return; + } + throw 
error; + } + _handleEntry(entry, base) { + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._pushToStorage(entry); + } + if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, entry.path); + } + } + _pushToStorage(entry) { + this._storage.add(entry); + } +} +exports.default = SyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts b/node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts deleted file mode 100644 index 5167ab33..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=sync.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts.map deleted file mode 100644 index bae880e1..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"sync.spec.d.ts","sourceRoot":"","sources":["../../src/readers/sync.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.spec.js b/node_modules/@nodelib/fs.walk/out/readers/sync.spec.js deleted file mode 100644 index bfd915f1..00000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.spec.js +++ /dev/null @@ -1,89 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const path = require("path"); -const sinon = require("sinon"); -const settings_1 = require("../settings"); -const tests = require("../tests"); -const sync_1 = require("./sync"); -class TestReader extends sync_1.default { - constructor(_root, _settings = new settings_1.default()) { - super(_root, _settings); - this._scandir = sinon.stub(); - } - get scandir() { - return this._scandir; - } -} -describe('Readers → Sync', () => { - describe('.read', () => { - it('should throw an error when the first call of scandir is broken', () => { - const reader = new TestReader('non-exist-directory'); - reader.scandir.throws(tests.EPERM_ERRNO); - assert.throws(() => reader.read(), { code: 'EPERM' }); - }); - it('should return empty array when the first call of scandir is broken but this error can be suppressed', () => { - const settings = new settings_1.default({ - errorFilter: (error) => error.code === 'EPERM' - }); - const reader = new TestReader('non-exist-directory', settings); - reader.scandir.throws(tests.EPERM_ERRNO); - const actual = reader.read(); - assert.deepStrictEqual(actual, []); - }); - it('should return entries', () => { - const reader = new TestReader('directory'); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - const expected = [fakeDirectoryEntry, fakeFileEntry]; - const actual = reader.read(); - assert.deepStrictEqual(actual, expected); - }); - it('should push to results only directories', () => { - const settings = new settings_1.default({ entryFilter: (entry) => !entry.dirent.isFile() }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const 
fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - const expected = [fakeDirectoryEntry]; - const actual = reader.read(); - assert.deepStrictEqual(actual, expected); - }); - it('should do not read root directory', () => { - const settings = new settings_1.default({ deepFilter: () => false }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - const expected = [fakeDirectoryEntry]; - const actual = reader.read(); - assert.deepStrictEqual(actual, expected); - }); - it('should set base path to entry when the `basePath` option is exist', () => { - const settings = new settings_1.default({ basePath: 'base' }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - const actual = reader.read(); - assert.strictEqual(actual[0].path, path.join('base', fakeDirectoryEntry.name)); - assert.strictEqual(actual[1].path, path.join('base', 'fake', fakeFileEntry.name)); - }); - it('should set base path to entry when the `basePath` option is exist and value is an empty string', () => { - const settings = new settings_1.default({ basePath: '' }); - const reader = new TestReader('directory', settings); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - const actual = reader.read(); - assert.strictEqual(actual[0].path, fakeDirectoryEntry.name); - assert.strictEqual(actual[1].path, path.join('fake', fakeFileEntry.name)); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/@nodelib/fs.walk/out/settings.d.ts index bc1f9d55..d1c4b45f 100644 --- a/node_modules/@nodelib/fs.walk/out/settings.d.ts +++ b/node_modules/@nodelib/fs.walk/out/settings.d.ts @@ -1,31 +1,30 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import { Entry, Errno } from './types'; -export declare type FilterFunction = (value: T) => boolean; -export declare type DeepFilterFunction = FilterFunction; -export declare type EntryFilterFunction = FilterFunction; -export declare type ErrorFilterFunction = FilterFunction; -export declare type Options = { - basePath?: string; - concurrency?: number; - deepFilter?: DeepFilterFunction; - entryFilter?: EntryFilterFunction; - errorFilter?: ErrorFilterFunction; - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -}; -export default class Settings { - private readonly _options; - readonly basePath?: string; - readonly concurrency: number; - readonly deepFilter: DeepFilterFunction | null; - readonly entryFilter: EntryFilterFunction | null; - readonly errorFilter: ErrorFilterFunction | null; - readonly pathSegmentSeparator: string; - readonly fsScandirSettings: fsScandir.Settings; - constructor(_options?: Options); - private _getValue; -} -//# sourceMappingURL=settings.d.ts.map \ No newline at end of file +import * 
as fsScandir from '@nodelib/fs.scandir'; +import type { Entry, Errno } from './types'; +export declare type FilterFunction = (value: T) => boolean; +export declare type DeepFilterFunction = FilterFunction; +export declare type EntryFilterFunction = FilterFunction; +export declare type ErrorFilterFunction = FilterFunction; +export interface Options { + basePath?: string; + concurrency?: number; + deepFilter?: DeepFilterFunction; + entryFilter?: EntryFilterFunction; + errorFilter?: ErrorFilterFunction; + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly basePath?: string; + readonly concurrency: number; + readonly deepFilter: DeepFilterFunction | null; + readonly entryFilter: EntryFilterFunction | null; + readonly errorFilter: ErrorFilterFunction | null; + readonly pathSegmentSeparator: string; + readonly fsScandirSettings: fsScandir.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.walk/out/settings.d.ts.map b/node_modules/@nodelib/fs.walk/out/settings.d.ts.map deleted file mode 100644 index 007e06f4..00000000 --- a/node_modules/@nodelib/fs.walk/out/settings.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"settings.d.ts","sourceRoot":"","sources":["../src/settings.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,SAAS,MAAM,qBAAqB,CAAC;AAEjD,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAEvC,oBAAY,cAAc,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,OAAO,CAAC;AACtD,oBAAY,kBAAkB,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;AACvD,oBAAY,mBAAmB,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;AACxD,oBAAY,mBAAmB,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;AAExD,oBAAY,OAAO,GAAG;IACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,CAAC,EAAE,kBAAkB,CAAC;IAChC,WAAW,CAAC,EAAE,mBAAmB,CAAC;IAClC,WAAW,CAAC,EAAE,mBAAmB,CAAC;IAClC,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAC9B,EAAE,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;IAC1C,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,8BAA8B,CAAC,EAAE,OAAO,CAAC;CACzC,CAAC;AAEF,MAAM,CAAC,OAAO,OAAO,QAAQ;IAgBhB,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAfrC,SAAgB,QAAQ,CAAC,EAAE,MAAM,CAAqD;IACtF,SAAgB,WAAW,EAAE,MAAM,CAAuD;IAC1F,SAAgB,UAAU,EAAE,kBAAkB,GAAG,IAAI,CAAkD;IACvG,SAAgB,WAAW,EAAE,mBAAmB,GAAG,IAAI,CAAmD;IAC1G,SAAgB,WAAW,EAAE,mBAAmB,GAAG,IAAI,CAAmD;IAC1G,SAAgB,oBAAoB,EAAE,MAAM,CAAgE;IAE5G,SAAgB,iBAAiB,EAAE,SAAS,CAAC,QAAQ,CAMlD;gBAE0B,QAAQ,GAAE,OAAY;IAEnD,OAAO,CAAC,SAAS;CAGjB"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/@nodelib/fs.walk/out/settings.js index d0e46574..d7a85c81 100644 --- a/node_modules/@nodelib/fs.walk/out/settings.js +++ b/node_modules/@nodelib/fs.walk/out/settings.js @@ -1,26 +1,26 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsScandir = require("@nodelib/fs.scandir"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.basePath = this._getValue(this._options.basePath, undefined); - this.concurrency = this._getValue(this._options.concurrency, Infinity); - this.deepFilter = this._getValue(this._options.deepFilter, null); - this.entryFilter = this._getValue(this._options.entryFilter, null); - this.errorFilter = this._getValue(this._options.errorFilter, null); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.fsScandirSettings = new fsScandir.Settings({ - 
followSymbolicLinks: this._options.followSymbolicLinks, - fs: this._options.fs, - pathSegmentSeparator: this._options.pathSegmentSeparator, - stats: this._options.stats, - throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? option : value; - } -} -exports.default = Settings; +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsScandir = require("@nodelib/fs.scandir"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.basePath = this._getValue(this._options.basePath, undefined); + this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); + this.deepFilter = this._getValue(this._options.deepFilter, null); + this.entryFilter = this._getValue(this._options.entryFilter, null); + this.errorFilter = this._getValue(this._options.errorFilter, null); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.fsScandirSettings = new fsScandir.Settings({ + followSymbolicLinks: this._options.followSymbolicLinks, + fs: this._options.fs, + pathSegmentSeparator: this._options.pathSegmentSeparator, + stats: this._options.stats, + throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.walk/out/settings.spec.d.ts b/node_modules/@nodelib/fs.walk/out/settings.spec.d.ts deleted file mode 100644 index ff5bc0fd..00000000 --- a/node_modules/@nodelib/fs.walk/out/settings.spec.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=settings.spec.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/settings.spec.d.ts.map b/node_modules/@nodelib/fs.walk/out/settings.spec.d.ts.map deleted file mode 100644 index b69e1aa7..00000000 --- a/node_modules/@nodelib/fs.walk/out/settings.spec.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"settings.spec.d.ts","sourceRoot":"","sources":["../src/settings.spec.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/settings.spec.js b/node_modules/@nodelib/fs.walk/out/settings.spec.js deleted file mode 100644 index ed076888..00000000 --- a/node_modules/@nodelib/fs.walk/out/settings.spec.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -const fsScandir = require("@nodelib/fs.scandir"); -const settings_1 = require("./settings"); -describe('Settings', () => { - it('should return instance with default values', () => { - const fsWalkSettings = new settings_1.default(); - const fsScandirSettings = new fsScandir.Settings({ - followSymbolicLinks: undefined, - fs: undefined, - pathSegmentSeparator: undefined, - stats: undefined, - throwErrorOnBrokenSymbolicLink: undefined - }); - assert.strictEqual(fsWalkSettings.basePath, undefined); - assert.strictEqual(fsWalkSettings.concurrency, Infinity); - assert.strictEqual(fsWalkSettings.deepFilter, null); - assert.strictEqual(fsWalkSettings.entryFilter, null); - assert.strictEqual(fsWalkSettings.errorFilter, null); - assert.deepStrictEqual(fsWalkSettings.fsScandirSettings, fsScandirSettings); - }); - it('should return instance with custom values', () => { - const filter = () => 
true; - const fsWalkSettings = new settings_1.default({ entryFilter: filter }); - assert.strictEqual(fsWalkSettings.entryFilter, filter); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/out/tests/index.d.ts b/node_modules/@nodelib/fs.walk/out/tests/index.d.ts deleted file mode 100644 index 0e54bb8d..00000000 --- a/node_modules/@nodelib/fs.walk/out/tests/index.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import * as sinon from 'sinon'; -import { Entry, Errno } from '../types'; -export declare function buildFakeFileEntry(entry?: Partial): Entry; -export declare function buildFakeDirectoryEntry(entry?: Partial): Entry; -export declare const EPERM_ERRNO: Errno; -export declare class TestAsyncReader { - read: sinon.SinonStub; - destroy: sinon.SinonStub; - onError: sinon.SinonStub; - onEntry: sinon.SinonStub; - onEnd: sinon.SinonStub; -} -export declare class TestSyncReader { - read: sinon.SinonStub; -} -//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/tests/index.d.ts.map b/node_modules/@nodelib/fs.walk/out/tests/index.d.ts.map deleted file mode 100644 index a0c2c66c..00000000 --- a/node_modules/@nodelib/fs.walk/out/tests/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/tests/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAG/B,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAExC,wBAAgB,kBAAkB,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,KAAK,CAAC,GAAG,KAAK,CAOhE;AAED,wBAAgB,uBAAuB,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,KAAK,CAAC,GAAG,KAAK,CAOrE;AAED,eAAO,MAAM,WAAW,EAAE,KAIzB,CAAC;AAEF,qBAAa,eAAe;IACpB,IAAI,EAAE,KAAK,CAAC,SAAS,CAAgB;IACrC,OAAO,EAAE,KAAK,CAAC,SAAS,CAAgB;IACxC,OAAO,EAAE,KAAK,CAAC,SAAS,CAAgB;IACxC,OAAO,EAAE,KAAK,CAAC,SAAS,CAAgB;IACxC,KAAK,EAAE,KAAK,CAAC,SAAS,CAAgB;CAC7C;AAED,qBAAa,cAAc;IACnB,IAAI,EAAE,KAAK,CAAC,SAAS,CAAgB;CAC5C"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/tests/index.js b/node_modules/@nodelib/fs.walk/out/tests/index.js deleted file mode 100644 index a53715fb..00000000 --- a/node_modules/@nodelib/fs.walk/out/tests/index.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TestSyncReader = exports.TestAsyncReader = exports.EPERM_ERRNO = exports.buildFakeDirectoryEntry = exports.buildFakeFileEntry = void 0; -const sinon = require("sinon"); -const fs_macchiato_1 = require("../../../fs.macchiato"); -function buildFakeFileEntry(entry) { - return Object.assign({ name: 'fake.txt', path: 'directory/fake.txt', dirent: new fs_macchiato_1.Dirent({ name: 'fake.txt' }) }, entry); -} -exports.buildFakeFileEntry = buildFakeFileEntry; -function buildFakeDirectoryEntry(entry) { - return Object.assign({ name: 'fake', path: 'directory/fake', dirent: new fs_macchiato_1.Dirent({ name: 'fake', isFile: false, isDirectory: true }) }, entry); -} -exports.buildFakeDirectoryEntry = buildFakeDirectoryEntry; -exports.EPERM_ERRNO = { - name: 'EPERM', - code: 'EPERM', - message: 'EPERM' -}; -class TestAsyncReader { - constructor() { - this.read = sinon.stub(); - this.destroy = sinon.stub(); - this.onError = sinon.stub(); - this.onEntry = sinon.stub(); - this.onEnd = sinon.stub(); - } -} -exports.TestAsyncReader = TestAsyncReader; -class TestSyncReader { - constructor() { - this.read = sinon.stub(); - } -} -exports.TestSyncReader = TestSyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/@nodelib/fs.walk/out/types/index.d.ts index 
75bc4ab5..6ee9bd3f 100644 --- a/node_modules/@nodelib/fs.walk/out/types/index.d.ts +++ b/node_modules/@nodelib/fs.walk/out/types/index.d.ts @@ -1,9 +1,8 @@ -/// -import * as scandir from '@nodelib/fs.scandir'; -export declare type Entry = scandir.Entry; -export declare type Errno = NodeJS.ErrnoException; -export declare type QueueItem = { - directory: string; - base?: string; -}; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +/// +import type * as scandir from '@nodelib/fs.scandir'; +export declare type Entry = scandir.Entry; +export declare type Errno = NodeJS.ErrnoException; +export interface QueueItem { + directory: string; + base?: string; +} diff --git a/node_modules/@nodelib/fs.walk/out/types/index.d.ts.map b/node_modules/@nodelib/fs.walk/out/types/index.d.ts.map deleted file mode 100644 index 1a73ab65..00000000 --- a/node_modules/@nodelib/fs.walk/out/types/index.d.ts.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,OAAO,MAAM,qBAAqB,CAAC;AAE/C,oBAAY,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC;AAClC,oBAAY,KAAK,GAAG,MAAM,CAAC,cAAc,CAAC;AAE1C,oBAAY,SAAS,GAAG;IACvB,SAAS,EAAE,MAAM,CAAC;IAClB,IAAI,CAAC,EAAE,MAAM,CAAC;CACd,CAAC"} \ No newline at end of file diff --git a/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/@nodelib/fs.walk/out/types/index.js index ce03781e..c8ad2e54 100644 --- a/node_modules/@nodelib/fs.walk/out/types/index.js +++ b/node_modules/@nodelib/fs.walk/out/types/index.js @@ -1,2 +1,2 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.walk/package.json b/node_modules/@nodelib/fs.walk/package.json index 71dcbe7a..14972102 100644 --- a/node_modules/@nodelib/fs.walk/package.json +++ b/node_modules/@nodelib/fs.walk/package.json @@ -1,6 +1,6 @@ { "name": "@nodelib/fs.walk", - "version": "1.2.6", + "version": "1.2.7", "description": "A library for efficiently walking a directory recursively", "license": "MIT", "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", @@ -16,6 +16,12 @@ "engines": { "node": ">= 8" }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*", + "!out/**/tests/**" + ], "main": "out/index.js", "typings": "out/index.d.ts", "scripts": { @@ -28,8 +34,11 @@ "watch": "npm run clean && npm run compile:watch" }, "dependencies": { - "@nodelib/fs.scandir": "2.1.4", + "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" }, - "gitHead": "bd149ea6dc1ef18c234730278271f6a7f3882ed2" + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" } diff --git a/node_modules/@nodelib/fs.walk/src/index.spec.ts b/node_modules/@nodelib/fs.walk/src/index.spec.ts deleted file mode 100644 index 6a2913e4..00000000 --- a/node_modules/@nodelib/fs.walk/src/index.spec.ts +++ /dev/null @@ -1,129 +0,0 @@ -import * as assert from 'assert'; -import * as fs from 'fs'; -import { Readable } from 'stream'; - -import * as rimraf from 'rimraf'; - -import { Errno } from './types'; -import { walk, walkSync, walkStream, Settings, Entry } from '.'; - -const entryFilter = (entry: Entry): boolean => !entry.dirent.isDirectory(); - -function streamToPromise(stream: Readable): Promise { - const entries: Entry[] = []; - - return new Promise((resolve, reject) => { - stream.on('data', (entry: Entry) => entries.push(entry)); - 
stream.once('error', reject); - stream.once('end', () => resolve(entries)); - }); -} - -describe('Package', () => { - before(() => { - rimraf.sync('fixtures'); - - fs.mkdirSync('fixtures'); - fs.writeFileSync('fixtures/file.txt', ''); - fs.mkdirSync('fixtures/nested'); - fs.writeFileSync('fixtures/nested/file.txt', ''); - }); - - after(() => { - rimraf.sync('fixtures'); - }); - - describe('.walk', () => { - it('should throw an error for non-exist directory', (done) => { - walk('non-exist-directory', (error, entries) => { - assert.strictEqual(error.code, 'ENOENT'); - assert.strictEqual(entries, undefined); - done(); - }); - }); - - it('should work without options or settings', (done) => { - walk('fixtures', (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 3); - done(); - }); - }); - - it('should work with options', (done) => { - walk('fixtures', { entryFilter }, (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 2); - done(); - }); - }); - - it('should work with settings', (done) => { - const settings = new Settings({ entryFilter }); - - walk('fixtures', settings, (error, entries) => { - assert.strictEqual(error, null); - assert.strictEqual(entries.length, 2); - done(); - }); - }); - }); - - describe('.walkStream', () => { - it('should throw an error for non-exist directory', async () => { - const stream = walkStream('non-exist-directory'); - - await assert.rejects(() => streamToPromise(stream), (error: Errno) => error.code === 'ENOENT'); - }); - - it('should work without options or settings', async () => { - const stream = walkStream('fixtures'); - const actual = await streamToPromise(stream); - - assert.strictEqual(actual.length, 3); - }); - - it('should work with options', async () => { - const stream = walkStream('fixtures', { entryFilter }); - const actual = await streamToPromise(stream); - - assert.strictEqual(actual.length, 2); - }); - - it('should work with settings', async () => { - const settings = new Settings({ entryFilter }); - const stream = walkStream('fixtures', settings); - const actual = await streamToPromise(stream); - - assert.strictEqual(actual.length, 2); - }); - }); - - describe('.walkSync', () => { - it('should throw an error for non-exist directory', () => { - const matcher = (error: Errno): boolean => error.code === 'ENOENT'; - - assert.throws(() => walkSync('non-exist-directory'), matcher); - }); - - it('should work without options or settings', () => { - const actual = walkSync('fixtures'); - - assert.strictEqual(actual.length, 3); - }); - - it('should work with options', () => { - const actual = walkSync('fixtures', { entryFilter }); - - assert.strictEqual(actual.length, 2); - }); - - it('should work with settings', () => { - const settings = new Settings({ entryFilter }); - - const actual = walkSync('fixtures', settings); - - assert.strictEqual(actual.length, 2); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/index.ts b/node_modules/@nodelib/fs.walk/src/index.ts deleted file mode 100644 index 5600dbfd..00000000 --- a/node_modules/@nodelib/fs.walk/src/index.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { Readable } from 'stream'; - -import { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; - -import AsyncProvider, { AsyncCallback } from './providers/async'; -import StreamProvider from './providers/stream'; -import SyncProvider from './providers/sync'; -import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from 
'./settings'; -import { Entry } from './types'; - -function walk(directory: string, callback: AsyncCallback): void; -function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -function walk(directory: string, optionsOrSettingsOrCallback: Options | Settings | AsyncCallback, callback?: AsyncCallback): void { - if (typeof optionsOrSettingsOrCallback === 'function') { - return new AsyncProvider(directory, getSettings()).read(optionsOrSettingsOrCallback); - } - - new AsyncProvider(directory, getSettings(optionsOrSettingsOrCallback)).read(callback as AsyncCallback); -} - -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -declare namespace walk { - function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; -} - -function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[] { - const settings = getSettings(optionsOrSettings); - const provider = new SyncProvider(directory, settings); - - return provider.read(); -} - -function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable { - const settings = getSettings(optionsOrSettings); - const provider = new StreamProvider(directory, settings); - - return provider.read(); -} - -function getSettings(settingsOrOptions: Settings | Options = {}): Settings { - if (settingsOrOptions instanceof Settings) { - return settingsOrOptions; - } - - return new Settings(settingsOrOptions); -} - -export { - walk, - walkSync, - walkStream, - Settings, - - AsyncCallback, - Dirent, - Entry, - FileSystemAdapter, - Options, - DeepFilterFunction, - EntryFilterFunction, - ErrorFilterFunction -}; diff --git a/node_modules/@nodelib/fs.walk/src/providers/async.spec.ts b/node_modules/@nodelib/fs.walk/src/providers/async.spec.ts deleted file mode 100644 index 83f540ad..00000000 --- a/node_modules/@nodelib/fs.walk/src/providers/async.spec.ts +++ /dev/null @@ -1,57 +0,0 @@ -import * as assert from 'assert'; - -import * as sinon from 'sinon'; - -import AsyncReader from '../readers/async'; -import Settings from '../settings'; -import * as tests from '../tests'; -import AsyncProvider from './async'; - -class TestProvider extends AsyncProvider { - protected readonly _reader: AsyncReader = new tests.TestAsyncReader() as unknown as AsyncReader; - - constructor(_root: string, _settings: Settings = new Settings()) { - super(_root, _settings); - } - - public get reader(): tests.TestAsyncReader { - return this._reader as unknown as tests.TestAsyncReader; - } -} - -describe('Providers → Async', () => { - describe('.read', () => { - it('should call reader function with correct set of arguments', () => { - const provider = new TestProvider('directory'); - const fakeCallback = sinon.stub(); - - provider.read(fakeCallback); - - assert.ok(provider.reader.read.called); - }); - - it('should call callback with error for failed launch', () => { - const provider = new TestProvider('directory'); - const fakeCallback = sinon.stub(); - - provider.reader.onError.yields(tests.EPERM_ERRNO); - - provider.read(fakeCallback); - - assert.deepStrictEqual(fakeCallback.args, [[tests.EPERM_ERRNO]]); - }); - - it('should push entries to storage and call callback with array of entries', () => { - const provider = new TestProvider('directory'); - const fakeEntry = tests.buildFakeFileEntry(); - const fakeCallback = sinon.stub(); - - provider.reader.onEntry.yields(fakeEntry); - provider.reader.onEnd.yields(); - - 
provider.read(fakeCallback); - - assert.deepStrictEqual(fakeCallback.args, [[null, [fakeEntry]]]); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/providers/async.ts b/node_modules/@nodelib/fs.walk/src/providers/async.ts deleted file mode 100644 index 0a59460c..00000000 --- a/node_modules/@nodelib/fs.walk/src/providers/async.ts +++ /dev/null @@ -1,40 +0,0 @@ -import AsyncReader from '../readers/async'; -import Settings from '../settings'; -import { Entry, Errno } from '../types'; - -type FailureCallback = (err: Errno) => void; -type SuccessCallback = (err: null, entries: Entry[]) => void; - -export type AsyncCallback = (err: Errno, entries: Entry[]) => void; - -export default class AsyncProvider { - protected readonly _reader: AsyncReader = new AsyncReader(this._root, this._settings); - - private readonly _storage: Set = new Set(); - - constructor(private readonly _root: string, private readonly _settings: Settings) { } - - public read(callback: AsyncCallback): void { - this._reader.onError((error) => { - callFailureCallback(callback, error); - }); - - this._reader.onEntry((entry: Entry) => { - this._storage.add(entry); - }); - - this._reader.onEnd(() => { - callSuccessCallback(callback, [...this._storage]); - }); - - this._reader.read(); - } -} - -function callFailureCallback(callback: AsyncCallback, error: Errno): void { - (callback as FailureCallback)(error); -} - -function callSuccessCallback(callback: AsyncCallback, entries: Entry[]): void { - (callback as unknown as SuccessCallback)(null, entries); -} diff --git a/node_modules/@nodelib/fs.walk/src/providers/index.ts b/node_modules/@nodelib/fs.walk/src/providers/index.ts deleted file mode 100644 index fa062301..00000000 --- a/node_modules/@nodelib/fs.walk/src/providers/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import AsyncProvider from './async'; -import StreamProvider from './stream'; -import SyncProvider from './sync'; - -export { - AsyncProvider, - StreamProvider, - SyncProvider -}; diff --git a/node_modules/@nodelib/fs.walk/src/providers/stream.spec.ts b/node_modules/@nodelib/fs.walk/src/providers/stream.spec.ts deleted file mode 100644 index b96f41c4..00000000 --- a/node_modules/@nodelib/fs.walk/src/providers/stream.spec.ts +++ /dev/null @@ -1,90 +0,0 @@ -import * as assert from 'assert'; -import { Readable } from 'stream'; - -import * as sinon from 'sinon'; - -import AsyncReader from '../readers/async'; -import Settings from '../settings'; -import * as tests from '../tests'; -import StreamProvider from './stream'; - -class TestProvider extends StreamProvider { - protected readonly _reader: AsyncReader = new tests.TestAsyncReader() as unknown as AsyncReader; - - constructor(_root: string, _settings: Settings = new Settings()) { - super(_root, _settings); - - this._stream.emit = sinon.stub(); - this._stream.push = sinon.stub(); - } - - public get reader(): tests.TestAsyncReader { - return this._reader as unknown as tests.TestAsyncReader; - } - - public get stream(): sinon.SinonStubbedInstance { - return this._stream as unknown as sinon.SinonStubbedInstance; - } -} - -describe('Providers → Stream', () => { - describe('.read', () => { - it('should return stream', () => { - const provider = new TestProvider('directory'); - - const stream = provider.read(); - - assert.ok(stream instanceof Readable); - }); - - it('should call reader function with correct set of arguments', () => { - const provider = new TestProvider('directory'); - - provider.read(); - - assert.ok(provider.reader.read.called); - }); - - it('should re-emit 
the "error" event from reader', () => { - const provider = new TestProvider('directory'); - - provider.reader.onError.yields(tests.EPERM_ERRNO); - - provider.read(); - - assert.deepStrictEqual(provider.stream.emit.args, [['error', tests.EPERM_ERRNO]]); - }); - - it('should call the "push" method with entry value for the "entry" event from reader', () => { - const provider = new TestProvider('directory'); - const fakeEntry = tests.buildFakeFileEntry(); - - provider.reader.onEntry.yields(fakeEntry); - - provider.read(); - - assert.deepStrictEqual(provider.stream.push.args, [[fakeEntry]]); - }); - - it('should call the "push" method with "null" value for the "end" event from reader', () => { - const provider = new TestProvider('directory'); - - provider.reader.onEnd.yields(); - - provider.read(); - - assert.deepStrictEqual(provider.stream.push.args, [[null]]); - }); - - it('should do not destroy reader when it is already destroyed', () => { - const provider = new TestProvider('directory'); - - const stream = provider.read(); - - stream.destroy(); - - assert.ok(stream.destroyed); - assert.doesNotThrow(() => stream.destroy()); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/providers/stream.ts b/node_modules/@nodelib/fs.walk/src/providers/stream.ts deleted file mode 100644 index a43a93a0..00000000 --- a/node_modules/@nodelib/fs.walk/src/providers/stream.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { Readable } from 'stream'; -import AsyncReader from '../readers/async'; -import Settings from '../settings'; - -export default class StreamProvider { - protected readonly _reader: AsyncReader = new AsyncReader(this._root, this._settings); - protected readonly _stream: Readable = new Readable({ - objectMode: true, - read: () => { /* noop */ }, - destroy: () => { - if (!this._reader.isDestroyed) { - this._reader.destroy(); - } - } - }); - - constructor(private readonly _root: string, private readonly _settings: Settings) { } - - public read(): Readable { - this._reader.onError((error) => { - this._stream.emit('error', error); - }); - - this._reader.onEntry((entry) => { - this._stream.push(entry); - }); - - this._reader.onEnd(() => { - this._stream.push(null); - }); - - this._reader.read(); - - return this._stream; - } -} diff --git a/node_modules/@nodelib/fs.walk/src/providers/sync.spec.ts b/node_modules/@nodelib/fs.walk/src/providers/sync.spec.ts deleted file mode 100644 index 1f45f831..00000000 --- a/node_modules/@nodelib/fs.walk/src/providers/sync.spec.ts +++ /dev/null @@ -1,34 +0,0 @@ -import * as assert from 'assert'; - -import SyncReader from '../readers/sync'; -import Settings from '../settings'; -import * as tests from '../tests'; -import SyncProvider from './sync'; - -class TestProvider extends SyncProvider { - protected readonly _reader: SyncReader = new tests.TestSyncReader() as unknown as SyncReader; - - constructor(_root: string, _settings: Settings = new Settings()) { - super(_root, _settings); - } - - public get reader(): tests.TestSyncReader { - return this._reader as unknown as tests.TestSyncReader; - } -} - -describe('Providers → Sync', () => { - describe('.read', () => { - it('should call reader function with correct set of arguments and got result', () => { - const provider = new TestProvider('directory'); - const fakeEntry = tests.buildFakeFileEntry(); - - provider.reader.read.returns([fakeEntry]); - - const actual = provider.read(); - - assert.deepStrictEqual(actual, [fakeEntry]); - assert.ok(provider.reader.read.called); - }); - }); -}); diff --git 
a/node_modules/@nodelib/fs.walk/src/providers/sync.ts b/node_modules/@nodelib/fs.walk/src/providers/sync.ts deleted file mode 100644 index 3a951c9d..00000000 --- a/node_modules/@nodelib/fs.walk/src/providers/sync.ts +++ /dev/null @@ -1,13 +0,0 @@ -import SyncReader from '../readers/sync'; -import Settings from '../settings'; -import { Entry } from '../types'; - -export default class SyncProvider { - protected readonly _reader: SyncReader = new SyncReader(this._root, this._settings); - - constructor(private readonly _root: string, private readonly _settings: Settings) { } - - public read(): Entry[] { - return this._reader.read(); - } -} diff --git a/node_modules/@nodelib/fs.walk/src/readers/async.spec.ts b/node_modules/@nodelib/fs.walk/src/readers/async.spec.ts deleted file mode 100644 index 07754390..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/async.spec.ts +++ /dev/null @@ -1,232 +0,0 @@ -import * as assert from 'assert'; -import * as path from 'path'; - -import * as fsScandir from '@nodelib/fs.scandir'; -import * as sinon from 'sinon'; - -import Settings from '../settings'; -import * as tests from '../tests'; -import { Entry } from '../types'; -import AsyncReader from './async'; - -type ScandirSignature = typeof fsScandir.scandir; - -class TestReader extends AsyncReader { - protected readonly _scandir: ScandirSignature = sinon.stub() as unknown as ScandirSignature; - - constructor(_root: string, _settings: Settings = new Settings()) { - super(_root, _settings); - } - - public get scandir(): sinon.SinonStub { - return this._scandir as unknown as sinon.SinonStub; - } -} - -describe('Readers → Async', () => { - describe('.read', () => { - it('should emit "error" event when the first call of scandir is broken', (done) => { - const reader = new TestReader('non-exist-directory'); - - reader.scandir.yields(tests.EPERM_ERRNO); - - reader.onError((error) => { - assert.ok(error); - done(); - }); - - reader.read(); - }); - - it('should emit "end" event when the first call of scandir is broken but this error can be suppressed', (done) => { - const settings = new Settings({ - errorFilter: (error) => error.code === 'EPERM' - }); - const reader = new TestReader('non-exist-directory', settings); - - reader.scandir.yields(tests.EPERM_ERRNO); - - reader.onEnd(() => { - done(); - }); - - reader.read(); - }); - - it('should do not emit events after first broken scandir call', (done) => { - const reader = new TestReader('directory'); - - const firstFakeDirectoryEntry = tests.buildFakeDirectoryEntry({ name: 'a', path: 'directory/a' }); - const secondFakeDirectoryEntry = tests.buildFakeDirectoryEntry({ name: 'b', path: 'directory/b' }); - - reader.scandir.onFirstCall().yields(null, [firstFakeDirectoryEntry, secondFakeDirectoryEntry]); - reader.scandir.onSecondCall().yieldsAsync(tests.EPERM_ERRNO); - reader.scandir.onThirdCall().yieldsAsync(tests.EPERM_ERRNO); - - /** - * If the behavior is broken, then a third scandir call will trigger an unhandled error. 
- */ - reader.onError((error) => { - assert.ok(error); - done(); - }); - - reader.read(); - }); - - it('should return entries', (done) => { - const reader = new TestReader('directory'); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - - const entries: Entry[] = []; - - reader.onEntry((entry) => entries.push(entry)); - - reader.onEnd(() => { - assert.deepStrictEqual(entries, [fakeDirectoryEntry, fakeFileEntry]); - done(); - }); - - reader.read(); - }); - - it('should push to results only directories', (done) => { - const settings = new Settings({ entryFilter: (entry) => !entry.dirent.isFile() }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - - const entries: Entry[] = []; - - reader.onEntry((entry) => entries.push(entry)); - - reader.onEnd(() => { - assert.deepStrictEqual(entries, [fakeDirectoryEntry]); - done(); - }); - - reader.read(); - }); - - it('should do not read root directory', (done) => { - const settings = new Settings({ deepFilter: () => false }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - - const entries: Entry[] = []; - - reader.onEntry((entry) => entries.push(entry)); - - reader.onEnd(() => { - assert.deepStrictEqual(entries, [fakeDirectoryEntry]); - done(); - }); - - reader.read(); - }); - - it('should set base path to entry when the `basePath` option is exist', (done) => { - const settings = new Settings({ basePath: 'base' }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - - const entries: Entry[] = []; - - reader.onEntry((entry) => entries.push(entry)); - - reader.onEnd(() => { - assert.strictEqual(entries[0].path, path.join('base', fakeDirectoryEntry.name)); - assert.strictEqual(entries[1].path, path.join('base', 'fake', fakeFileEntry.name)); - done(); - }); - - reader.read(); - }); - - it('should set base path to entry when the `basePath` option is exist and value is an empty string', (done) => { - const settings = new Settings({ basePath: '' }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().yields(null, [fakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - - const entries: Entry[] = []; - - reader.onEntry((entry) => entries.push(entry)); - - reader.onEnd(() => { - assert.strictEqual(entries[0].path, path.join(fakeDirectoryEntry.name)); - assert.strictEqual(entries[1].path, path.join('fake', fakeFileEntry.name)); - done(); - }); - - reader.read(); - }); - }); - - describe('.destroy', () => { - it('should do not emit 
entries after destroy', (done) => { - const reader = new TestReader('directory'); - - const firstFakeDirectoryEntry = tests.buildFakeDirectoryEntry({ name: 'a', path: 'directory/a' }); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().yields(null, [firstFakeDirectoryEntry]); - reader.scandir.onSecondCall().yields(null, [fakeFileEntry]); - - reader.onEntry((entry) => { - if (entry.name === 'a') { - reader.destroy(); - } else { - assert.fail('should do not emit entries after destroy'); - } - }); - - reader.onEnd(() => { - done(); - }); - - reader.read(); - }); - - it('should mark stream as "destroyed" after first destroy', () => { - const reader = new TestReader('directory'); - - reader.destroy(); - - assert.ok(reader.isDestroyed); - }); - - it('should throw an error when trying to destroy reader twice', () => { - const reader = new TestReader('directory'); - - const expectedErrorMessageRe = /The reader is already destroyed/; - - reader.destroy(); - - assert.throws(() => reader.destroy(), expectedErrorMessageRe); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/readers/async.ts b/node_modules/@nodelib/fs.walk/src/readers/async.ts deleted file mode 100644 index d82884a9..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/async.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { EventEmitter } from 'events'; - -import * as fsScandir from '@nodelib/fs.scandir'; -import * as fastq from 'fastq'; - -import Settings from '../settings'; -import { Entry, Errno, QueueItem } from '../types'; -import * as common from './common'; -import Reader from './reader'; - -type EntryEventCallback = (entry: Entry) => void; -type ErrorEventCallback = (error: Errno) => void; -type EndEventCallback = () => void; - -export default class AsyncReader extends Reader { - protected readonly _scandir: typeof fsScandir.scandir = fsScandir.scandir; - protected readonly _emitter: EventEmitter = new EventEmitter(); - - private readonly _queue: fastq.queue = fastq(this._worker.bind(this), this._settings.concurrency); - private _isFatalError: boolean = false; - private _isDestroyed: boolean = false; - - constructor(_root: string, protected readonly _settings: Settings) { - super(_root, _settings); - - this._queue.drain = () => { - if (!this._isFatalError) { - this._emitter.emit('end'); - } - }; - } - - public read(): EventEmitter { - this._isFatalError = false; - this._isDestroyed = false; - - setImmediate(() => { - this._pushToQueue(this._root, this._settings.basePath); - }); - - return this._emitter; - } - - public get isDestroyed(): boolean { - return this._isDestroyed; - } - - public destroy(): void { - if (this._isDestroyed) { - throw new Error('The reader is already destroyed'); - } - - this._isDestroyed = true; - this._queue.killAndDrain(); - } - - public onEntry(callback: EntryEventCallback): void { - this._emitter.on('entry', callback); - } - - public onError(callback: ErrorEventCallback): void { - this._emitter.once('error', callback); - } - - public onEnd(callback: EndEventCallback): void { - this._emitter.once('end', callback); - } - - private _pushToQueue(directory: string, base?: string): void { - const queueItem: QueueItem = { directory, base }; - - this._queue.push(queueItem, (error: Error | null) => { - if (error !== null) { - this._handleError(error); - } - }); - } - - private _worker(item: QueueItem, done: fastq.done): void { - this._scandir(item.directory, this._settings.fsScandirSettings, (error: NodeJS.ErrnoException | null, entries) => { - if (error !== null) 
{ - return done(error, undefined); - } - - for (const entry of entries) { - this._handleEntry(entry, item.base); - } - - done(null as unknown as Error, undefined); - }); - } - - private _handleError(error: Error): void { - if (this._isDestroyed || !common.isFatalError(this._settings, error)) { - return; - } - - this._isFatalError = true; - this._isDestroyed = true; - this._emitter.emit('error', error); - } - - private _handleEntry(entry: Entry, base?: string): void { - if (this._isDestroyed || this._isFatalError) { - return; - } - - const fullpath = entry.path; - - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._emitEntry(entry); - } - - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); - } - } - - private _emitEntry(entry: Entry): void { - this._emitter.emit('entry', entry); - } -} diff --git a/node_modules/@nodelib/fs.walk/src/readers/common.spec.ts b/node_modules/@nodelib/fs.walk/src/readers/common.spec.ts deleted file mode 100644 index b883dddc..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/common.spec.ts +++ /dev/null @@ -1,114 +0,0 @@ -import * as assert from 'assert'; -import * as path from 'path'; - -import Settings from '../settings'; -import * as tests from '../tests'; -import * as common from './common'; - -describe('Readers → Common', () => { - describe('.isFatalError', () => { - it('should return true when filter is not defined', () => { - const settings = new Settings(); - - const actual = common.isFatalError(settings, tests.EPERM_ERRNO); - - assert.ok(actual); - }); - - it('should return true when the error cannot be suppressed', () => { - const settings = new Settings({ - errorFilter: (error) => error.code === 'ENOENT' - }); - - const actual = common.isFatalError(settings, tests.EPERM_ERRNO); - - assert.ok(actual); - }); - - it('should return false when the error can be suppressed', () => { - const settings = new Settings({ - errorFilter: (error) => error.code === 'EPERM' - }); - - const actual = common.isFatalError(settings, tests.EPERM_ERRNO); - - assert.ok(!actual); - }); - }); - - describe('.isAppliedFilter', () => { - it('should return true when the filter is not defined', () => { - const settings = new Settings(); - const entry = tests.buildFakeFileEntry(); - - const actual = common.isAppliedFilter(settings.entryFilter, entry); - - assert.ok(actual); - }); - - it('should return true when the entry will be applied', () => { - const settings = new Settings({ - entryFilter: (entry) => entry.name === 'fake.txt' - }); - const fakeEntry = tests.buildFakeFileEntry(); - - const actual = common.isAppliedFilter(settings.entryFilter, fakeEntry); - - assert.ok(actual); - }); - - it('should return false when the entry will be skipped', () => { - const settings = new Settings({ - entryFilter: (entry) => entry.name !== 'fake.txt' - }); - const fakeEntry = tests.buildFakeFileEntry(); - - const actual = common.isAppliedFilter(settings.entryFilter, fakeEntry); - - assert.ok(!actual); - }); - }); - - describe('.replacePathSegmentSeparator', () => { - it('should replace path segment separator', () => { - const filepath = path.join('directory', 'file.txt'); - - const expected = 'directory_file.txt'; - - const actual = common.replacePathSegmentSeparator(filepath, '_'); - - assert.strictEqual(actual, expected); - }); - }); - - 
describe('.joinPathSegments', () => { - it('should return concatenated string', () => { - const expected = 'a&b'; - - const actual = common.joinPathSegments('a', 'b', '&'); - - assert.strictEqual(actual, expected); - }); - - it('should return second part of path when the first path is an empty string', () => { - const expected = 'b'; - - const actual = common.joinPathSegments('', 'b', '&'); - - assert.strictEqual(actual, expected); - }); - - it('should return correct string when the first segment ens with the separator symbol', () => { - // Unix - assert.strictEqual(common.joinPathSegments('/', 'a', '/'), '/a'); - assert.strictEqual(common.joinPathSegments('//', 'a', '/'), '//a'); - assert.strictEqual(common.joinPathSegments('/a/', 'b', '/'), '/a/b'); - - // Windows - assert.strictEqual(common.joinPathSegments('C:/', 'Users', '/'), 'C:/Users'); - assert.strictEqual(common.joinPathSegments('C:\\', 'Users', '\\'), 'C:\\Users'); - assert.strictEqual(common.joinPathSegments('//?/C:/', 'Users', '/'), '//?/C:/Users'); - assert.strictEqual(common.joinPathSegments('\\\\?\\C:\\', 'Users', '\\'), '\\\\?\\C:\\Users'); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/readers/common.ts b/node_modules/@nodelib/fs.walk/src/readers/common.ts deleted file mode 100644 index eeec6674..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/common.ts +++ /dev/null @@ -1,33 +0,0 @@ -import Settings, { FilterFunction } from '../settings'; -import { Errno } from '../types'; - -export function isFatalError(settings: Settings, error: Errno): boolean { - if (settings.errorFilter === null) { - return true; - } - - return !settings.errorFilter(error); -} - -export function isAppliedFilter(filter: FilterFunction | null, value: T): boolean { - return filter === null || filter(value); -} - -export function replacePathSegmentSeparator(filepath: string, separator: string): string { - return filepath.split(/[/\\]/).join(separator); -} - -export function joinPathSegments(a: string, b: string, separator: string): string { - if (a === '') { - return b; - } - - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
- */ - if (a.endsWith(separator)) { - return a + b; - } - - return a + separator + b; -} diff --git a/node_modules/@nodelib/fs.walk/src/readers/reader.spec.ts b/node_modules/@nodelib/fs.walk/src/readers/reader.spec.ts deleted file mode 100644 index 2ac71bfd..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/reader.spec.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as assert from 'assert'; -import * as path from 'path'; - -import Settings, { Options } from '../settings'; -import Reader from './reader'; - -class TestReader extends Reader { - public get root(): string { - return this._root; - } -} - -function getReader(root: string, options: Options = {}): TestReader { - return new TestReader(root, new Settings(options)); -} - -describe('Readers → Reader', () => { - describe('Constructor', () => { - it('should return root path with replaced path segment separators', () => { - const root = path.join('directory', 'file.txt'); - const reader = getReader(root, { pathSegmentSeparator: '_' }); - - const expected = 'directory_file.txt'; - - const actual = reader.root; - - assert.strictEqual(actual, expected); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/readers/reader.ts b/node_modules/@nodelib/fs.walk/src/readers/reader.ts deleted file mode 100644 index fa434ad4..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/reader.ts +++ /dev/null @@ -1,8 +0,0 @@ -import Settings from '../settings'; -import * as common from './common'; - -export default class Reader { - constructor(protected readonly _root: string, protected readonly _settings: Settings) { - this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); - } -} diff --git a/node_modules/@nodelib/fs.walk/src/readers/sync.spec.ts b/node_modules/@nodelib/fs.walk/src/readers/sync.spec.ts deleted file mode 100644 index d850d136..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/sync.spec.ts +++ /dev/null @@ -1,126 +0,0 @@ -import * as assert from 'assert'; -import * as path from 'path'; - -import * as sinon from 'sinon'; - -import Settings from '../settings'; -import * as tests from '../tests'; -import SyncReader from './sync'; - -class TestReader extends SyncReader { - protected readonly _scandir: sinon.SinonStub = sinon.stub(); - - constructor(_root: string, _settings: Settings = new Settings()) { - super(_root, _settings); - } - - public get scandir(): sinon.SinonStub { - return this._scandir; - } -} - -describe('Readers → Sync', () => { - describe('.read', () => { - it('should throw an error when the first call of scandir is broken', () => { - const reader = new TestReader('non-exist-directory'); - - reader.scandir.throws(tests.EPERM_ERRNO); - - assert.throws(() => reader.read(), { code: 'EPERM' }); - }); - - it('should return empty array when the first call of scandir is broken but this error can be suppressed', () => { - const settings = new Settings({ - errorFilter: (error) => error.code === 'EPERM' - }); - const reader = new TestReader('non-exist-directory', settings); - - reader.scandir.throws(tests.EPERM_ERRNO); - - const actual = reader.read(); - - assert.deepStrictEqual(actual, []); - }); - - it('should return entries', () => { - const reader = new TestReader('directory'); - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - - const expected = [fakeDirectoryEntry, fakeFileEntry]; - - const 
actual = reader.read(); - - assert.deepStrictEqual(actual, expected); - }); - - it('should push to results only directories', () => { - const settings = new Settings({ entryFilter: (entry) => !entry.dirent.isFile() }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - - const expected = [fakeDirectoryEntry]; - - const actual = reader.read(); - - assert.deepStrictEqual(actual, expected); - }); - - it('should do not read root directory', () => { - const settings = new Settings({ deepFilter: () => false }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - - const expected = [fakeDirectoryEntry]; - - const actual = reader.read(); - - assert.deepStrictEqual(actual, expected); - }); - - it('should set base path to entry when the `basePath` option is exist', () => { - const settings = new Settings({ basePath: 'base' }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - - const actual = reader.read(); - - assert.strictEqual(actual[0].path, path.join('base', fakeDirectoryEntry.name)); - assert.strictEqual(actual[1].path, path.join('base', 'fake', fakeFileEntry.name)); - }); - - it('should set base path to entry when the `basePath` option is exist and value is an empty string', () => { - const settings = new Settings({ basePath: '' }); - const reader = new TestReader('directory', settings); - - const fakeDirectoryEntry = tests.buildFakeDirectoryEntry(); - const fakeFileEntry = tests.buildFakeFileEntry(); - - reader.scandir.onFirstCall().returns([fakeDirectoryEntry]); - reader.scandir.onSecondCall().returns([fakeFileEntry]); - - const actual = reader.read(); - - assert.strictEqual(actual[0].path, fakeDirectoryEntry.name); - assert.strictEqual(actual[1].path, path.join('fake', fakeFileEntry.name)); - }); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/readers/sync.ts b/node_modules/@nodelib/fs.walk/src/readers/sync.ts deleted file mode 100644 index 5e24f873..00000000 --- a/node_modules/@nodelib/fs.walk/src/readers/sync.ts +++ /dev/null @@ -1,69 +0,0 @@ -import * as fsScandir from '@nodelib/fs.scandir'; - -import { Entry, Errno, QueueItem } from '../types'; -import * as common from './common'; -import Reader from './reader'; - -export default class SyncReader extends Reader { - protected readonly _scandir: typeof fsScandir.scandirSync = fsScandir.scandirSync; - - private readonly _storage: Set = new Set(); - private readonly _queue: Set = new Set(); - - public read(): Entry[] { - this._pushToQueue(this._root, this._settings.basePath); - this._handleQueue(); - - return [...this._storage]; - } - - private _pushToQueue(directory: string, base?: string): void { - this._queue.add({ directory, base }); - } - - private _handleQueue(): void { - for (const item of this._queue.values()) { - this._handleDirectory(item.directory, item.base); - } - } - - private 
_handleDirectory(directory: string, base?: string): void { - try { - const entries = this._scandir(directory, this._settings.fsScandirSettings); - - for (const entry of entries) { - this._handleEntry(entry, base); - } - } catch (error) { - this._handleError(error as Errno); - } - } - - private _handleError(error: Errno): void { - if (!common.isFatalError(this._settings, error)) { - return; - } - - throw error; - } - - private _handleEntry(entry: Entry, base?: string): void { - const fullpath = entry.path; - - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._pushToStorage(entry); - } - - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); - } - } - - private _pushToStorage(entry: Entry): void { - this._storage.add(entry); - } -} diff --git a/node_modules/@nodelib/fs.walk/src/settings.spec.ts b/node_modules/@nodelib/fs.walk/src/settings.spec.ts deleted file mode 100644 index 2e1419e0..00000000 --- a/node_modules/@nodelib/fs.walk/src/settings.spec.ts +++ /dev/null @@ -1,33 +0,0 @@ -import * as assert from 'assert'; - -import * as fsScandir from '@nodelib/fs.scandir'; - -import Settings from './settings'; - -describe('Settings', () => { - it('should return instance with default values', () => { - const fsWalkSettings = new Settings(); - const fsScandirSettings = new fsScandir.Settings({ - followSymbolicLinks: undefined, - fs: undefined, - pathSegmentSeparator: undefined, - stats: undefined, - throwErrorOnBrokenSymbolicLink: undefined - }); - - assert.strictEqual(fsWalkSettings.basePath, undefined); - assert.strictEqual(fsWalkSettings.concurrency, Infinity); - assert.strictEqual(fsWalkSettings.deepFilter, null); - assert.strictEqual(fsWalkSettings.entryFilter, null); - assert.strictEqual(fsWalkSettings.errorFilter, null); - assert.deepStrictEqual(fsWalkSettings.fsScandirSettings, fsScandirSettings); - }); - - it('should return instance with custom values', () => { - const filter = (): boolean => true; - - const fsWalkSettings = new Settings({ entryFilter: filter }); - - assert.strictEqual(fsWalkSettings.entryFilter, filter); - }); -}); diff --git a/node_modules/@nodelib/fs.walk/src/settings.ts b/node_modules/@nodelib/fs.walk/src/settings.ts deleted file mode 100644 index 3b7a25ad..00000000 --- a/node_modules/@nodelib/fs.walk/src/settings.ts +++ /dev/null @@ -1,46 +0,0 @@ -import * as path from 'path'; - -import * as fsScandir from '@nodelib/fs.scandir'; - -import { Entry, Errno } from './types'; - -export type FilterFunction = (value: T) => boolean; -export type DeepFilterFunction = FilterFunction; -export type EntryFilterFunction = FilterFunction; -export type ErrorFilterFunction = FilterFunction; - -export type Options = { - basePath?: string; - concurrency?: number; - deepFilter?: DeepFilterFunction; - entryFilter?: EntryFilterFunction; - errorFilter?: ErrorFilterFunction; - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -}; - -export default class Settings { - public readonly basePath?: string = this._getValue(this._options.basePath, undefined); - public readonly concurrency: number = this._getValue(this._options.concurrency, Infinity); - public readonly deepFilter: DeepFilterFunction | null = this._getValue(this._options.deepFilter, null); - public readonly 
entryFilter: EntryFilterFunction | null = this._getValue(this._options.entryFilter, null); - public readonly errorFilter: ErrorFilterFunction | null = this._getValue(this._options.errorFilter, null); - public readonly pathSegmentSeparator: string = this._getValue(this._options.pathSegmentSeparator, path.sep); - - public readonly fsScandirSettings: fsScandir.Settings = new fsScandir.Settings({ - followSymbolicLinks: this._options.followSymbolicLinks, - fs: this._options.fs, - pathSegmentSeparator: this._options.pathSegmentSeparator, - stats: this._options.stats, - throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink - }); - - constructor(private readonly _options: Options = {}) { } - - private _getValue(option: T | undefined, value: T): T { - return option ?? value; - } -} diff --git a/node_modules/@nodelib/fs.walk/src/tests/index.ts b/node_modules/@nodelib/fs.walk/src/tests/index.ts deleted file mode 100644 index 651b2463..00000000 --- a/node_modules/@nodelib/fs.walk/src/tests/index.ts +++ /dev/null @@ -1,40 +0,0 @@ -import * as sinon from 'sinon'; - -import { Dirent } from '../../../fs.macchiato'; -import { Entry, Errno } from '../types'; - -export function buildFakeFileEntry(entry?: Partial): Entry { - return { - name: 'fake.txt', - path: 'directory/fake.txt', - dirent: new Dirent({ name: 'fake.txt' }), - ...entry - }; -} - -export function buildFakeDirectoryEntry(entry?: Partial): Entry { - return { - name: 'fake', - path: 'directory/fake', - dirent: new Dirent({ name: 'fake', isFile: false, isDirectory: true }), - ...entry - }; -} - -export const EPERM_ERRNO: Errno = { - name: 'EPERM', - code: 'EPERM', - message: 'EPERM' -}; - -export class TestAsyncReader { - public read: sinon.SinonStub = sinon.stub(); - public destroy: sinon.SinonStub = sinon.stub(); - public onError: sinon.SinonStub = sinon.stub(); - public onEntry: sinon.SinonStub = sinon.stub(); - public onEnd: sinon.SinonStub = sinon.stub(); -} - -export class TestSyncReader { - public read: sinon.SinonStub = sinon.stub(); -} diff --git a/node_modules/@nodelib/fs.walk/src/types/index.ts b/node_modules/@nodelib/fs.walk/src/types/index.ts deleted file mode 100644 index 3dba47a9..00000000 --- a/node_modules/@nodelib/fs.walk/src/types/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import * as scandir from '@nodelib/fs.scandir'; - -export type Entry = scandir.Entry; -export type Errno = NodeJS.ErrnoException; - -export type QueueItem = { - directory: string; - base?: string; -}; diff --git a/node_modules/@nodelib/fs.walk/tsconfig.json b/node_modules/@nodelib/fs.walk/tsconfig.json deleted file mode 100644 index 3202b34f..00000000 --- a/node_modules/@nodelib/fs.walk/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "extends": "../../../tsconfig.json", - "compilerOptions": { - "rootDir": "src", - "outDir": "out" - }, - "references": [ - { - "path": "../fs.macchiato" - }, - { - "path": "../fs.scandir" - } - ] -} diff --git a/node_modules/@nodelib/fs.walk/tsconfig.tsbuildinfo b/node_modules/@nodelib/fs.walk/tsconfig.tsbuildinfo deleted file mode 100644 index 2c9a1265..00000000 --- a/node_modules/@nodelib/fs.walk/tsconfig.tsbuildinfo +++ /dev/null @@ -1,1894 +0,0 @@ -{ - "program": { - "fileInfos": { - "../../../node_modules/typescript/lib/lib.es5.d.ts": { - "version": "70ae6416528e68c2ee7b62892200d2ca631759943d4429f8b779b947ff1e124d", - "signature": "70ae6416528e68c2ee7b62892200d2ca631759943d4429f8b779b947ff1e124d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.d.ts": { - "version": 
"dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", - "signature": "dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2016.d.ts": { - "version": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", - "signature": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2017.d.ts": { - "version": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", - "signature": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.es2018.d.ts": { - "version": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", - "signature": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", - "affectsGlobalScope": false - }, - "../../../node_modules/typescript/lib/lib.dom.d.ts": { - "version": "9affb0a2ddc57df5b8174c0af96c288d697a262e5bc9ca1f544c999dc64a91e6", - "signature": "9affb0a2ddc57df5b8174c0af96c288d697a262e5bc9ca1f544c999dc64a91e6", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.dom.iterable.d.ts": { - "version": "fb0c09b697dc42afa84d1587e3c994a2f554d2a45635e4f0618768d16a86b69a", - "signature": "fb0c09b697dc42afa84d1587e3c994a2f554d2a45635e4f0618768d16a86b69a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.webworker.importscripts.d.ts": { - "version": "7fac8cb5fc820bc2a59ae11ef1c5b38d3832c6d0dfaec5acdb5569137d09a481", - "signature": "7fac8cb5fc820bc2a59ae11ef1c5b38d3832c6d0dfaec5acdb5569137d09a481", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.scripthost.d.ts": { - "version": "097a57355ded99c68e6df1b738990448e0bf170e606707df5a7c0481ff2427cd", - "signature": "097a57355ded99c68e6df1b738990448e0bf170e606707df5a7c0481ff2427cd", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.core.d.ts": { - "version": "63e0cc12d0f77394094bd19e84464f9840af0071e5b9358ced30511efef1d8d2", - "signature": "63e0cc12d0f77394094bd19e84464f9840af0071e5b9358ced30511efef1d8d2", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.collection.d.ts": { - "version": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", - "signature": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.generator.d.ts": { - "version": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", - "signature": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.iterable.d.ts": { - "version": "42f5e41e5893da663dbf0394268f54f1da4b43dc0ddd2ea4bf471fe5361d6faf", - "signature": "42f5e41e5893da663dbf0394268f54f1da4b43dc0ddd2ea4bf471fe5361d6faf", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.promise.d.ts": { - "version": "0b7a905675e6cb4211c128f0a3aa47d414b275180a299a9aad5d3ec298abbfc4", - "signature": "0b7a905675e6cb4211c128f0a3aa47d414b275180a299a9aad5d3ec298abbfc4", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.proxy.d.ts": { - "version": "dfff68b3c34338f6b307a25d4566de15eed7973b0dc5d69f9fde2bcac1c25315", - "signature": 
"dfff68b3c34338f6b307a25d4566de15eed7973b0dc5d69f9fde2bcac1c25315", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.reflect.d.ts": { - "version": "cb609802a8698aa28b9c56331d4b53f590ca3c1c3a255350304ae3d06017779d", - "signature": "cb609802a8698aa28b9c56331d4b53f590ca3c1c3a255350304ae3d06017779d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.symbol.d.ts": { - "version": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", - "signature": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts": { - "version": "4670208dd7da9d6c774ab1b75c1527a810388c7989c4905de6aaea8561cb9dce", - "signature": "4670208dd7da9d6c774ab1b75c1527a810388c7989c4905de6aaea8561cb9dce", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2016.array.include.d.ts": { - "version": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", - "signature": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.object.d.ts": { - "version": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", - "signature": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts": { - "version": "d0db416bccdb33975548baf09a42ee8c47eace1aac7907351a000f1e568e7232", - "signature": "d0db416bccdb33975548baf09a42ee8c47eace1aac7907351a000f1e568e7232", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.string.d.ts": { - "version": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", - "signature": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.intl.d.ts": { - "version": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", - "signature": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.typedarrays.d.ts": { - "version": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", - "signature": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts": { - "version": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", - "signature": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.asynciterable.d.ts": { - "version": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", - "signature": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.intl.d.ts": { - "version": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", - "signature": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.promise.d.ts": { - "version": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", - "signature": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", 
- "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2018.regexp.d.ts": { - "version": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", - "signature": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2020.bigint.d.ts": { - "version": "4f435f794b7853c55e2ae7cff6206025802aa79232d2867544178f2ca8ff5eaa", - "signature": "4f435f794b7853c55e2ae7cff6206025802aa79232d2867544178f2ca8ff5eaa", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.esnext.intl.d.ts": { - "version": "89bf2b7a601b73ea4311eda9c41f86a58994fec1bee3b87c4a14d68d9adcdcbd", - "signature": "89bf2b7a601b73ea4311eda9c41f86a58994fec1bee3b87c4a14d68d9adcdcbd", - "affectsGlobalScope": true - }, - "../../../node_modules/typescript/lib/lib.es2017.full.d.ts": { - "version": "d2f31f19e1ba6ed59be9259d660a239d9a3fcbbc8e038c6b2009bde34b175fed", - "signature": "d2f31f19e1ba6ed59be9259d660a239d9a3fcbbc8e038c6b2009bde34b175fed", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/globals.d.ts": { - "version": "74d61a149bea97a20b324410e4520796ffc36dcf35b54f03cfd0cfe922bb61cc", - "signature": "74d61a149bea97a20b324410e4520796ffc36dcf35b54f03cfd0cfe922bb61cc", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/async_hooks.d.ts": { - "version": "950e73fe3bcda768b5f593cec3f7137bb7cab709a82be89dd08c2a20568a28e2", - "signature": "950e73fe3bcda768b5f593cec3f7137bb7cab709a82be89dd08c2a20568a28e2", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/buffer.d.ts": { - "version": "61215c1a376bbe8f51cab4cc4ddbf3746387015113c37a84d981d4738c21b878", - "signature": "61215c1a376bbe8f51cab4cc4ddbf3746387015113c37a84d981d4738c21b878", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/child_process.d.ts": { - "version": "5eca801fb67009c5728b88793670f0137b5e31a8f7d1576d5110a1276feaba8c", - "signature": "5eca801fb67009c5728b88793670f0137b5e31a8f7d1576d5110a1276feaba8c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/cluster.d.ts": { - "version": "ce629710e5e58724902b753212e97861fd73e2aa09f5d88cb6d55dc763cf8c8a", - "signature": "ce629710e5e58724902b753212e97861fd73e2aa09f5d88cb6d55dc763cf8c8a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/console.d.ts": { - "version": "525c8fc510d9632d2a0a9de2d41c3ac1cdd79ff44d3b45c6d81cacabb683528d", - "signature": "525c8fc510d9632d2a0a9de2d41c3ac1cdd79ff44d3b45c6d81cacabb683528d", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/constants.d.ts": { - "version": "0279383034fae92db8097d0a41350293553599cc9c3c917b60e2542d0dfcbd44", - "signature": "0279383034fae92db8097d0a41350293553599cc9c3c917b60e2542d0dfcbd44", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/crypto.d.ts": { - "version": "9b9f8b151698fb1798f04b8375e240c764f094e730192e6a5353abdb1c709d6f", - "signature": "9b9f8b151698fb1798f04b8375e240c764f094e730192e6a5353abdb1c709d6f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/dgram.d.ts": { - "version": "37c4598a5f2025c97492e18bed8909ccd10bf26bb5f54d5f6009f9153291af91", - "signature": "37c4598a5f2025c97492e18bed8909ccd10bf26bb5f54d5f6009f9153291af91", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/dns.d.ts": { - "version": "ef226a42de7022eacdfa0f15aabf73b46c47af93044c8ebfab8aa8e3cf6c330c", - "signature": 
"ef226a42de7022eacdfa0f15aabf73b46c47af93044c8ebfab8aa8e3cf6c330c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/domain.d.ts": { - "version": "d5b7c8819ce1bd31a45f7675309e145ec28e3aa1b60a8e0637fd0e8916255baa", - "signature": "d5b7c8819ce1bd31a45f7675309e145ec28e3aa1b60a8e0637fd0e8916255baa", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/events.d.ts": { - "version": "76048f3c7325a6c1fa6306d40eb0c8570fa0209d09472d46f9b1221f66edae6f", - "signature": "76048f3c7325a6c1fa6306d40eb0c8570fa0209d09472d46f9b1221f66edae6f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/fs.d.ts": { - "version": "03be37150cc8fe48fd243169653f15149e0ed4a34eea0cae027b708d39eb01f8", - "signature": "03be37150cc8fe48fd243169653f15149e0ed4a34eea0cae027b708d39eb01f8", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/http.d.ts": { - "version": "f50ba0d2d8f891fa11326db36e6c25fe14bce747cf2bd9b554de3bb2a814f49c", - "signature": "f50ba0d2d8f891fa11326db36e6c25fe14bce747cf2bd9b554de3bb2a814f49c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/http2.d.ts": { - "version": "48b53111cc4ce136803fbf857cd8de2d5df33895b1af714a87caf87562182e46", - "signature": "48b53111cc4ce136803fbf857cd8de2d5df33895b1af714a87caf87562182e46", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/https.d.ts": { - "version": "dacbe08610729f6343ea9880ea8e737c6d7a6efa4a318d8f6acaf85db4aceed6", - "signature": "dacbe08610729f6343ea9880ea8e737c6d7a6efa4a318d8f6acaf85db4aceed6", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/inspector.d.ts": { - "version": "4218ced3933a31eed1278d350dd63c5900df0f0904f57d61c054d7a4b83dbe4c", - "signature": "4218ced3933a31eed1278d350dd63c5900df0f0904f57d61c054d7a4b83dbe4c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/module.d.ts": { - "version": "03394bf8deb8781b490ae9266a843fbdf00647947d79e25fcbf1d89a9e9c8a66", - "signature": "03394bf8deb8781b490ae9266a843fbdf00647947d79e25fcbf1d89a9e9c8a66", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/net.d.ts": { - "version": "358398fe4034395d85c87c319cca7a04001434b13dc68d067481ecb374385bfc", - "signature": "358398fe4034395d85c87c319cca7a04001434b13dc68d067481ecb374385bfc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/os.d.ts": { - "version": "d9bc6f1917c24d862a68d2633e4a32fd586bfe3e412e5d11fd07d8266b94ced5", - "signature": "d9bc6f1917c24d862a68d2633e4a32fd586bfe3e412e5d11fd07d8266b94ced5", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/path.d.ts": { - "version": "5fb30076f0e0e5744db8993648bfb67aadd895f439edad5cce039127a87a8a36", - "signature": "5fb30076f0e0e5744db8993648bfb67aadd895f439edad5cce039127a87a8a36", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/perf_hooks.d.ts": { - "version": "93a8a589862b5ac8fd8bb46426f7b081ba825a5171337dd45de9bf141624d55e", - "signature": "93a8a589862b5ac8fd8bb46426f7b081ba825a5171337dd45de9bf141624d55e", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/process.d.ts": { - "version": "0e0d58f5e90c0a270dac052b9c5ad8ccdfc8271118c2105b361063218d528d6e", - "signature": "0e0d58f5e90c0a270dac052b9c5ad8ccdfc8271118c2105b361063218d528d6e", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/punycode.d.ts": { - "version": "3f6a1fd73c9dc3bd7f4b79bc075297ca6527904df69b0f2c2c94e4c4c7d9a32c", - "signature": 
"3f6a1fd73c9dc3bd7f4b79bc075297ca6527904df69b0f2c2c94e4c4c7d9a32c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/querystring.d.ts": { - "version": "758948c06a0d02623c7d4ed357ffa79bdc170de6e004046678774a1bfa9a29bb", - "signature": "758948c06a0d02623c7d4ed357ffa79bdc170de6e004046678774a1bfa9a29bb", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/readline.d.ts": { - "version": "2ca26a43dec700c4b0bdc04b123094f4becffda70e3960f3e10b025f7a15ba8f", - "signature": "2ca26a43dec700c4b0bdc04b123094f4becffda70e3960f3e10b025f7a15ba8f", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/repl.d.ts": { - "version": "27c3f3f672a6ce267f7cc34643231032016fa4b6d195c0725db570de0a7a9f91", - "signature": "27c3f3f672a6ce267f7cc34643231032016fa4b6d195c0725db570de0a7a9f91", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/stream.d.ts": { - "version": "9c581919a8c483f5080487ae8ec1dd398d94027aedf8e77436085e7fab23951a", - "signature": "9c581919a8c483f5080487ae8ec1dd398d94027aedf8e77436085e7fab23951a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/string_decoder.d.ts": { - "version": "7e62aac2cc9c0710d772047ad89e8d7117f52592c791eb995ce1f865fedab432", - "signature": "7e62aac2cc9c0710d772047ad89e8d7117f52592c791eb995ce1f865fedab432", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/timers.d.ts": { - "version": "b40652bf8ce4a18133b31349086523b219724dca8df3448c1a0742528e7ad5b9", - "signature": "b40652bf8ce4a18133b31349086523b219724dca8df3448c1a0742528e7ad5b9", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/tls.d.ts": { - "version": "48064f81a8354d04808e7b5bddf570aaf19f894cf1d8a2aa1f56c81decd33508", - "signature": "48064f81a8354d04808e7b5bddf570aaf19f894cf1d8a2aa1f56c81decd33508", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/trace_events.d.ts": { - "version": "a77fdb357c78b70142b2fdbbfb72958d69e8f765fd2a3c69946c1018e89d4638", - "signature": "a77fdb357c78b70142b2fdbbfb72958d69e8f765fd2a3c69946c1018e89d4638", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/tty.d.ts": { - "version": "3c2ac350c3baa61fd2b1925844109e098f4376d0768a4643abc82754fd752748", - "signature": "3c2ac350c3baa61fd2b1925844109e098f4376d0768a4643abc82754fd752748", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/url.d.ts": { - "version": "834545a7726e414890371aec1a89b7915963e08e790e093259e8bed429ef15c6", - "signature": "834545a7726e414890371aec1a89b7915963e08e790e093259e8bed429ef15c6", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/util.d.ts": { - "version": "b248fb69886bce18cf6650491f43f0326ed6d59c8fdf7fd63dbd35bf4ef3e2bc", - "signature": "b248fb69886bce18cf6650491f43f0326ed6d59c8fdf7fd63dbd35bf4ef3e2bc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/v8.d.ts": { - "version": "4407bd5f1d6f748590ba125195eb1d7a003c2de2f3b057456d3ac76a742d2561", - "signature": "4407bd5f1d6f748590ba125195eb1d7a003c2de2f3b057456d3ac76a742d2561", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/vm.d.ts": { - "version": "2b57b7d7191c6e2efc2ed4f87cf1e25c383278ac5d019670406508df42dc34f3", - "signature": "2b57b7d7191c6e2efc2ed4f87cf1e25c383278ac5d019670406508df42dc34f3", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/worker_threads.d.ts": { - "version": "46f0413ecc0d83b047d46dbe03a37c7c760f59f0bb9a8633150e2d9335870675", - "signature": 
"46f0413ecc0d83b047d46dbe03a37c7c760f59f0bb9a8633150e2d9335870675", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/zlib.d.ts": { - "version": "2ea98f43cfae8dfbefc45d8bd1ec4907bbad33d18203ea8ef8b50d36b97afa35", - "signature": "2ea98f43cfae8dfbefc45d8bd1ec4907bbad33d18203ea8ef8b50d36b97afa35", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/ts3.3/base.d.ts": { - "version": "067b1964df87a4fc98ebffbd2bada6d7ed14a5b032f9071ea39478d82e701a99", - "signature": "067b1964df87a4fc98ebffbd2bada6d7ed14a5b032f9071ea39478d82e701a99", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/globals.global.d.ts": { - "version": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", - "signature": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/node/wasi.d.ts": { - "version": "14a6a3cee450438254c004a6b4f1191ec9977186bdeda07764f2a8d90ef71117", - "signature": "14a6a3cee450438254c004a6b4f1191ec9977186bdeda07764f2a8d90ef71117", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/ts3.6/base.d.ts": { - "version": "d170ea32762c00c660740f2cc0ca9526290ab9d9fb9c72282c1fa53cd1a7728e", - "signature": "d170ea32762c00c660740f2cc0ca9526290ab9d9fb9c72282c1fa53cd1a7728e", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/assert.d.ts": { - "version": "54b2276780dc8d538a71b954b87ea081a1e9f90e7f1195f2daf2bddde0bf52df", - "signature": "54b2276780dc8d538a71b954b87ea081a1e9f90e7f1195f2daf2bddde0bf52df", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/base.d.ts": { - "version": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", - "signature": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/node/index.d.ts": { - "version": "6fa68653382bd571bc63831e9f9c1307cc52f7310c1470463fe429d84147667d", - "signature": "6fa68653382bd571bc63831e9f9c1307cc52f7310c1470463fe429d84147667d", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/minimatch/index.d.ts": { - "version": "1d1e6bd176eee5970968423d7e215bfd66828b6db8d54d17afec05a831322633", - "signature": "1d1e6bd176eee5970968423d7e215bfd66828b6db8d54d17afec05a831322633", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/glob/index.d.ts": { - "version": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", - "signature": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/rimraf/index.d.ts": { - "version": "6462324ef579c47415610a63f1aa8b72f5b5114f8fe8307967f9add2bca634f5", - "signature": "6462324ef579c47415610a63f1aa8b72f5b5114f8fe8307967f9add2bca634f5", - "affectsGlobalScope": false - }, - "../fs.scandir/out/adapters/fs.d.ts": { - "version": "6cee571d9f8ea88d197d1614e5cf328391e96dff69ca61ab5299a630d0c65822", - "signature": "6cee571d9f8ea88d197d1614e5cf328391e96dff69ca61ab5299a630d0c65822", - "affectsGlobalScope": false - }, - "../fs.stat/out/adapters/fs.d.ts": { - "version": "ceebf93146ac7b3f85276a2501de57c5cf5bb19742944c958bd831f995b41409", - "signature": "ceebf93146ac7b3f85276a2501de57c5cf5bb19742944c958bd831f995b41409", - "affectsGlobalScope": false - }, - "../fs.stat/out/settings.d.ts": { - "version": "30c47bd1f03a220a10e8c11708a2c73c04135999ca1a35271605f9683d36b432", - "signature": 
"30c47bd1f03a220a10e8c11708a2c73c04135999ca1a35271605f9683d36b432", - "affectsGlobalScope": false - }, - "../fs.stat/out/types/index.d.ts": { - "version": "8b9fa6dfb2bec7abe9937fe049505d896550b2ad600cb7114b6fe2813b5cf180", - "signature": "8b9fa6dfb2bec7abe9937fe049505d896550b2ad600cb7114b6fe2813b5cf180", - "affectsGlobalScope": false - }, - "../fs.stat/out/providers/async.d.ts": { - "version": "c692034610ac35559227657172f6f76581ee7b16c319c7d5973e19b650f11b9f", - "signature": "c692034610ac35559227657172f6f76581ee7b16c319c7d5973e19b650f11b9f", - "affectsGlobalScope": false - }, - "../fs.stat/out/index.d.ts": { - "version": "079488cc4bf1eef64297994ef8719c078a86380610beea1d1a920c9436997967", - "signature": "079488cc4bf1eef64297994ef8719c078a86380610beea1d1a920c9436997967", - "affectsGlobalScope": false - }, - "../fs.scandir/out/settings.d.ts": { - "version": "354b18c096926d3f35b597b6309d9792b6517840bbf972c095930d067e28de83", - "signature": "354b18c096926d3f35b597b6309d9792b6517840bbf972c095930d067e28de83", - "affectsGlobalScope": false - }, - "../fs.scandir/out/types/index.d.ts": { - "version": "a86bb35bb7b946058e544aba4a7ac5359648d58904184701f1ca23b26b36ccd3", - "signature": "a86bb35bb7b946058e544aba4a7ac5359648d58904184701f1ca23b26b36ccd3", - "affectsGlobalScope": false - }, - "../fs.scandir/out/providers/async.d.ts": { - "version": "fa316e5f457f8a5ef600f305a60cb0031f0cf07c7c6991b520d353739f56c6ad", - "signature": "fa316e5f457f8a5ef600f305a60cb0031f0cf07c7c6991b520d353739f56c6ad", - "affectsGlobalScope": false - }, - "../fs.scandir/out/index.d.ts": { - "version": "105b0c9e3b159a1f32ca33bf1cb5ead919ea15bab7143bd99b298ceb687b0e84", - "signature": "105b0c9e3b159a1f32ca33bf1cb5ead919ea15bab7143bd99b298ceb687b0e84", - "affectsGlobalScope": false - }, - "./src/types/index.ts": { - "version": "6a96e278bdce17d05b4f37250085483a80729a2158c7a867c408c84381028faa", - "signature": "e336f6c501a424c6e016b3244b094d5924d06b320cb659aaa0adb647ab5a9fd6", - "affectsGlobalScope": false - }, - "../../../node_modules/fastq/index.d.ts": { - "version": "f34118dde11e1e6b2402d7e250e6864b0705ec609c4a8cb6e7a64e94bc3800d1", - "signature": "f34118dde11e1e6b2402d7e250e6864b0705ec609c4a8cb6e7a64e94bc3800d1", - "affectsGlobalScope": false - }, - "./src/settings.ts": { - "version": "61f27272c17f7fc99cac0143e912a54f8b9f5e5123e88d3557336ac8d4187016", - "signature": "07924b2c69a8f7019f242fbb1d9363b98731456d8afc791fc0622c91f60598b7", - "affectsGlobalScope": false - }, - "./src/readers/common.ts": { - "version": "32f39c761407b1bf9e1eb841c52f8a55b619933a095789a4cf119d98b54afdf9", - "signature": "a19646eafd0213927fb4c08e69088bebf265b3c16726e8cacaa810b35d0dbeb5", - "affectsGlobalScope": false - }, - "./src/readers/reader.ts": { - "version": "6c46b344594d6870286dde196a95e72e3d63ed61d02a301cddd5a935ec5058d9", - "signature": "b8576d89cac43fe52489662c1218b463cba5197c3bec501e2832f5663327838d", - "affectsGlobalScope": false - }, - "./src/readers/async.ts": { - "version": "c7e216abc4caaf7ca77092e124dc378ff3af3143d246d3ba6182a445423c8109", - "signature": "e7a91ba8f4fa3f6d166235d9c94fc01d3b055fc4a16bc18dcf0c5c1e0491f22b", - "affectsGlobalScope": false - }, - "./src/providers/async.ts": { - "version": "b4666d83483f607e7dacbac5510cf9e108d0d38a79dda411d00857b20614f504", - "signature": "51d7fcd7672c0f53f01c84e5ea8ef5f3947e8a385807de7eecdc0b7464c2dd68", - "affectsGlobalScope": false - }, - "./src/providers/stream.ts": { - "version": "978511ca1df883f878315565dc638a6b0784b9c929d0103dc62c4c41024a1e10", - "signature": 
"13adf8b7a2325d87ea12edbb2081c0a81a06c575fb3d6c583fd9fc87113e7731", - "affectsGlobalScope": false - }, - "./src/readers/sync.ts": { - "version": "521e4d1fb8fb7de21cf7b599a9fd7ea0e6d747c8914cc7119b4a6b3756ff27d1", - "signature": "a7d40806b1d5aa763d2c75d00e5d6c3050f70e37ae2ca7f8856c48803d7a7f71", - "affectsGlobalScope": false - }, - "./src/providers/sync.ts": { - "version": "a984e8ab39513cc15581fabe10d543e80c265000dabbaf5ff06f7edacd509b86", - "signature": "56c12b46bc3bb96c824c0f7c7fe4a032f8dfe4ee77932ad78be7a5cf83eca366", - "affectsGlobalScope": false - }, - "./src/index.ts": { - "version": "79a628cf15fdf18ee685eaa0b75066cac2dfa18f80826336f2561e25530426f6", - "signature": "96892cc7ee950d2bbded6dc7f03ff50a719df54f6529aa938ce3854fef04ee2a", - "affectsGlobalScope": false - }, - "./src/index.spec.ts": { - "version": "c159a9d372124a9f22b0cb9dd242b5354d4c8e7f6f4602792d92b75a87d71605", - "signature": "a900cdf2c35bba00b0363cc950bbf88b887976e70a9eae929dad35ef964109d9", - "affectsGlobalScope": false - }, - "./src/settings.spec.ts": { - "version": "f68c3c54888592c789941dc2d6a7145a054dfdc359d8f806bb8aa37369b18bda", - "signature": "714d2bb322e0442caf181768f049abd17a96d328d87169c2e6c13a86839c4463", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/sinon/ts3.1/index.d.ts": { - "version": "168435ab3390620aebf1aa0001b380983582d0849755eeb17f2c501d1fc57587", - "signature": "168435ab3390620aebf1aa0001b380983582d0849755eeb17f2c501d1fc57587", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/types.d.ts": { - "version": "47b605d1e61f92f418c7879051e5458f8ec00aeacac419a37754066fec42b9ba", - "signature": "47b605d1e61f92f418c7879051e5458f8ec00aeacac419a37754066fec42b9ba", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/dirent.d.ts": { - "version": "98387ef539ccca1023a5934f6ea2dd87ee8a6c87db31ec7986b9da016c66fc16", - "signature": "98387ef539ccca1023a5934f6ea2dd87ee8a6c87db31ec7986b9da016c66fc16", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/stats.d.ts": { - "version": "7c70ba0c69002f78ddac880f0096de5b0e78248cf680c1fe1a89439dbf069c5d", - "signature": "7c70ba0c69002f78ddac880f0096de5b0e78248cf680c1fe1a89439dbf069c5d", - "affectsGlobalScope": false - }, - "../fs.macchiato/out/index.d.ts": { - "version": "e00937f585b9c2f95d9d4e00b4e76427eb9516c70a4470d805451ba2ea00044e", - "signature": "e00937f585b9c2f95d9d4e00b4e76427eb9516c70a4470d805451ba2ea00044e", - "affectsGlobalScope": false - }, - "./src/tests/index.ts": { - "version": "22b49bb57a9b554cbca55662d2e04f8a4285ef30ed58aaea4e5826b153a60d5b", - "signature": "f5bbb15da84f1ca0d672ab2f502188c214936713d1952885f299e63fa9e2b3de", - "affectsGlobalScope": false - }, - "./src/providers/async.spec.ts": { - "version": "6f0dd7b1ef7cd2ca78ed2f954047e3c6a9df5bc37329869f6211c9a6ca195b08", - "signature": "6ff501c2b9280fbf7322044c48dff6eea6849df3b6ab6844facd9d789988a2c9", - "affectsGlobalScope": false - }, - "./src/providers/index.ts": { - "version": "16e0fc0a24f33533156b270c2ac70e822fa952ef4a305db8248798a6d0ae539f", - "signature": "a4f97dec3015c794e10ffd88ee8832db66f667c51d49eb5d69b49b0e1efc410c", - "affectsGlobalScope": false - }, - "./src/providers/stream.spec.ts": { - "version": "3dfb335689cb23c892fec86351d885e642a68e346e5856cde087b46c74942097", - "signature": "91e8c9343cbf6795565dc7a86b811e9a9742d611faed3f588c2da40dccea14e3", - "affectsGlobalScope": false - }, - "./src/providers/sync.spec.ts": { - "version": "0f551e41ebec0b876ee7874bc7aac28f1644f4fdc73e9124116a80bbcee49190", - "signature": 
"be22d8b5a836edfac7c9c5ef03e98058ec89f0b98edef8e54ea410187b0bda28", - "affectsGlobalScope": false - }, - "./src/readers/async.spec.ts": { - "version": "a08e84ab63d51e9a4dfcfde0ed356aadb19a1fbbe6c48028be0fb1a189ca61f4", - "signature": "6ff501c2b9280fbf7322044c48dff6eea6849df3b6ab6844facd9d789988a2c9", - "affectsGlobalScope": false - }, - "./src/readers/common.spec.ts": { - "version": "cc13401e9c9d385fe793a35934d1b5f095707709985fccf28c29192c4f88b4d2", - "signature": "d030e48e296ebe726e80c9b6e26988c7d9a5c1dba321a5f03d7ce297f130aeae", - "affectsGlobalScope": false - }, - "./src/readers/reader.spec.ts": { - "version": "c337c2f503119ded6fa003a726da62a3c060c29ac61d0dff5d8190a7fc2394fc", - "signature": "e57ffc6520bdbd5a218c9b963087784eb84d6a96473c82b438b85628455e7827", - "affectsGlobalScope": false - }, - "./src/readers/sync.spec.ts": { - "version": "8c863d867daba979948c0e00bb270bfefbf769322a12c574871d84b1317fc1c1", - "signature": "be22d8b5a836edfac7c9c5ef03e98058ec89f0b98edef8e54ea410187b0bda28", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/eslint-visitor-keys/index.d.ts": { - "version": "725d9be2fd48440256f4deb00649adffdbc5ecd282b09e89d4e200663792c34c", - "signature": "725d9be2fd48440256f4deb00649adffdbc5ecd282b09e89d4e200663792c34c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/fs-extra/index.d.ts": { - "version": "aca36e2d27783f4bad7fc1786a532ff76024f0fc8575df48bcd9a5eb452fe7e7", - "signature": "aca36e2d27783f4bad7fc1786a532ff76024f0fc8575df48bcd9a5eb452fe7e7", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/highlight.js/index.d.ts": { - "version": "21a2fa3722dc0baba2649e040c3121eb38ce84f5afe35ff1c20276132eaa2f2c", - "signature": "21a2fa3722dc0baba2649e040c3121eb38ce84f5afe35ff1c20276132eaa2f2c", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/json-schema/index.d.ts": { - "version": "b2be568d8ce95fcb26eebd04c035d94825655fdf689bf67d799f5ff8cbbb1024", - "signature": "b2be568d8ce95fcb26eebd04c035d94825655fdf689bf67d799f5ff8cbbb1024", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/common.d.ts": { - "version": "3594c022901a1c8993b0f78a3f534cfb81e7b619ed215348f7f6882f3db02abc", - "signature": "3594c022901a1c8993b0f78a3f534cfb81e7b619ed215348f7f6882f3db02abc", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/array.d.ts": { - "version": "d03a1ae3d39f757c9f22e4e775b940a98d86bb50ec85529b59e32a17b65c2b90", - "signature": "d03a1ae3d39f757c9f22e4e775b940a98d86bb50ec85529b59e32a17b65c2b90", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/collection.d.ts": { - "version": "0c75b204aed9cf6ff1c7b4bed87a3ece0d9d6fc857a6350c0c95ed0c38c814e8", - "signature": "0c75b204aed9cf6ff1c7b4bed87a3ece0d9d6fc857a6350c0c95ed0c38c814e8", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/date.d.ts": { - "version": "187119ff4f9553676a884e296089e131e8cc01691c546273b1d0089c3533ce42", - "signature": "187119ff4f9553676a884e296089e131e8cc01691c546273b1d0089c3533ce42", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/function.d.ts": { - "version": "c9f396e71966bd3a890d8a36a6a497dbf260e9b868158ea7824d4b5421210afe", - "signature": "c9f396e71966bd3a890d8a36a6a497dbf260e9b868158ea7824d4b5421210afe", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/lang.d.ts": { - "version": "509235563ea2b939e1bbe92aae17e71e6a82ceab8f568b45fb4fce7d72523a32", - "signature": 
"509235563ea2b939e1bbe92aae17e71e6a82ceab8f568b45fb4fce7d72523a32", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/math.d.ts": { - "version": "9364c7566b0be2f7b70ff5285eb34686f83ccb01bda529b82d23b2a844653bfb", - "signature": "9364c7566b0be2f7b70ff5285eb34686f83ccb01bda529b82d23b2a844653bfb", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/number.d.ts": { - "version": "00baffbe8a2f2e4875367479489b5d43b5fc1429ecb4a4cc98cfc3009095f52a", - "signature": "00baffbe8a2f2e4875367479489b5d43b5fc1429ecb4a4cc98cfc3009095f52a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/object.d.ts": { - "version": "c311349ec71bb69399ffc4092853e7d8a86c1ca39ddb4cd129e775c19d985793", - "signature": "c311349ec71bb69399ffc4092853e7d8a86c1ca39ddb4cd129e775c19d985793", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/seq.d.ts": { - "version": "3c92b6dfd43cc1c2485d9eba5ff0b74a19bb8725b692773ef1d66dac48cda4bd", - "signature": "3c92b6dfd43cc1c2485d9eba5ff0b74a19bb8725b692773ef1d66dac48cda4bd", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/string.d.ts": { - "version": "4908e4c00832b26ce77a629de8501b0e23a903c094f9e79a7fec313a15da796a", - "signature": "4908e4c00832b26ce77a629de8501b0e23a903c094f9e79a7fec313a15da796a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/common/util.d.ts": { - "version": "2630a7cbb597e85d713b7ef47f2946d4280d3d4c02733282770741d40672b1a5", - "signature": "2630a7cbb597e85d713b7ef47f2946d4280d3d4c02733282770741d40672b1a5", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/lodash/index.d.ts": { - "version": "0714e2046df66c0e93c3330d30dbc0565b3e8cd3ee302cf99e4ede6220e5fec8", - "signature": "0714e2046df66c0e93c3330d30dbc0565b3e8cd3ee302cf99e4ede6220e5fec8", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/marked/index.d.ts": { - "version": "c08a5e873738f5576ae1ca5810b5ebc30509f05bde56c3a3bbdd75d6c0806e6a", - "signature": "c08a5e873738f5576ae1ca5810b5ebc30509f05bde56c3a3bbdd75d6c0806e6a", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/minimist/index.d.ts": { - "version": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", - "signature": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/mocha/index.d.ts": { - "version": "c4c03cf65951d980ba618ae9601d10438730803fc9c8a1f7b34af8739981e205", - "signature": "c4c03cf65951d980ba618ae9601d10438730803fc9c8a1f7b34af8739981e205", - "affectsGlobalScope": true - }, - "../../../node_modules/@types/normalize-package-data/index.d.ts": { - "version": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", - "signature": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/run-parallel/index.d.ts": { - "version": "eefea34ce2cdb15ab6678c8c7911c27b2c3da267d7922f192f3d2eb0bf621821", - "signature": "eefea34ce2cdb15ab6678c8c7911c27b2c3da267d7922f192f3d2eb0bf621821", - "affectsGlobalScope": false - }, - "../../../node_modules/@types/shelljs/index.d.ts": { - "version": "b73abc91e3166b1951d302f8008c17e62d32e570e71b2680141f7c3f5d0a990d", - "signature": "b73abc91e3166b1951d302f8008c17e62d32e570e71b2680141f7c3f5d0a990d", - "affectsGlobalScope": false - } - }, - "options": { - "target": 4, - "module": 1, - "moduleResolution": 2, - "strict": true, - 
"alwaysStrict": true, - "strictFunctionTypes": true, - "strictNullChecks": true, - "strictPropertyInitialization": true, - "forceConsistentCasingInFileNames": true, - "noImplicitAny": true, - "noImplicitReturns": true, - "noImplicitThis": true, - "noFallthroughCasesInSwitch": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "emitDecoratorMetadata": true, - "experimentalDecorators": true, - "downlevelIteration": true, - "composite": true, - "declaration": true, - "declarationMap": true, - "pretty": true, - "rootDir": "./src", - "outDir": "./out", - "configFilePath": "./tsconfig.json" - }, - "referencedMap": { - "../../../node_modules/@types/fs-extra/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/glob/index.d.ts": [ - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/array.d.ts": [ - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/collection.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/common.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/date.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - 
"../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/function.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/lang.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/math.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/number.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - 
"../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/object.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/seq.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/string.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/util.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/index.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", 
- "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts" - ], - "../../../node_modules/@types/node/base.d.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts" - ], - "../../../node_modules/@types/node/child_process.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/cluster.d.ts": [ - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/crypto.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/dgram.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/domain.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/fs.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http2.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/https.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/index.d.ts": [ - "../../../node_modules/@types/node/base.d.ts" - ], - "../../../node_modules/@types/node/inspector.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/net.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/perf_hooks.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts" - ], - "../../../node_modules/@types/node/process.d.ts": [ - "../../../node_modules/@types/node/tty.d.ts" - ], - "../../../node_modules/@types/node/readline.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/repl.d.ts": [ - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/util.d.ts", - 
"../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/stream.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/tls.d.ts": [ - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/ts3.3/base.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts" - ], - "../../../node_modules/@types/node/ts3.6/base.d.ts": [ - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/wasi.d.ts" - ], - "../../../node_modules/@types/node/tty.d.ts": [ - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/url.d.ts": [ - "../../../node_modules/@types/node/querystring.d.ts" - ], - "../../../node_modules/@types/node/v8.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/worker_threads.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/zlib.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/rimraf/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - 
"../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/shelljs/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.macchiato/out/dirent.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.macchiato/out/index.d.ts": [ - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/stats.d.ts" - ], - "../fs.macchiato/out/stats.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.scandir/out/adapters/fs.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.scandir/out/index.d.ts": [ - "../fs.scandir/out/adapters/fs.d.ts", - "../fs.scandir/out/providers/async.d.ts", - "../fs.scandir/out/settings.d.ts", - "../fs.scandir/out/types/index.d.ts" - ], - "../fs.scandir/out/providers/async.d.ts": [ - "../../../node_modules/@types/node/index.d.ts", - "../fs.scandir/out/settings.d.ts", - "../fs.scandir/out/types/index.d.ts" - ], - "../fs.scandir/out/settings.d.ts": [ - "../fs.scandir/out/adapters/fs.d.ts", - "../fs.stat/out/index.d.ts" - ], - "../fs.scandir/out/types/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.stat/out/adapters/fs.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.stat/out/index.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts", - "../fs.stat/out/providers/async.d.ts", - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ], - "../fs.stat/out/providers/async.d.ts": [ - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ], - "../fs.stat/out/settings.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts" - ], - "../fs.stat/out/types/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "./src/index.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/rimraf/index.d.ts", - "./src/index.ts", - "./src/types/index.ts" - ], - "./src/index.ts": [ - "../../../node_modules/@types/node/stream.d.ts", - "../fs.scandir/out/index.d.ts", - "./src/providers/async.ts", - "./src/providers/stream.ts", - "./src/providers/sync.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/async.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "./src/providers/async.ts", - "./src/readers/async.ts", - "./src/settings.ts", - "./src/tests/index.ts" - ], - "./src/providers/async.ts": [ - "./src/readers/async.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/index.ts": [ - "./src/providers/async.ts", - "./src/providers/stream.ts", - "./src/providers/sync.ts" - ], - "./src/providers/stream.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "./src/providers/stream.ts", - "./src/readers/async.ts", - "./src/settings.ts", - "./src/tests/index.ts" - ], - "./src/providers/stream.ts": [ - 
"../../../node_modules/@types/node/stream.d.ts", - "./src/readers/async.ts", - "./src/settings.ts" - ], - "./src/providers/sync.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "./src/providers/sync.ts", - "./src/readers/sync.ts", - "./src/settings.ts", - "./src/tests/index.ts" - ], - "./src/providers/sync.ts": [ - "./src/readers/sync.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/readers/async.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../fs.scandir/out/index.d.ts", - "./src/readers/async.ts", - "./src/settings.ts", - "./src/tests/index.ts", - "./src/types/index.ts" - ], - "./src/readers/async.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/fastq/index.d.ts", - "../fs.scandir/out/index.d.ts", - "./src/readers/common.ts", - "./src/readers/reader.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/readers/common.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "./src/readers/common.ts", - "./src/settings.ts", - "./src/tests/index.ts" - ], - "./src/readers/common.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/readers/reader.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "./src/readers/reader.ts", - "./src/settings.ts" - ], - "./src/readers/reader.ts": [ - "./src/readers/common.ts", - "./src/settings.ts" - ], - "./src/readers/sync.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "./src/readers/sync.ts", - "./src/settings.ts", - "./src/tests/index.ts" - ], - "./src/readers/sync.ts": [ - "../fs.scandir/out/index.d.ts", - "./src/readers/common.ts", - "./src/readers/reader.ts", - "./src/types/index.ts" - ], - "./src/settings.spec.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../fs.scandir/out/index.d.ts", - "./src/settings.ts" - ], - "./src/settings.ts": [ - "../../../node_modules/@types/node/path.d.ts", - "../fs.scandir/out/index.d.ts", - "./src/types/index.ts" - ], - "./src/tests/index.ts": [ - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../fs.macchiato/out/index.d.ts", - "./src/types/index.ts" - ], - "./src/types/index.ts": [ - "../fs.scandir/out/index.d.ts" - ] - }, - "exportedModulesMap": { - "../../../node_modules/@types/fs-extra/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/glob/index.d.ts": [ - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/array.d.ts": [ - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - 
"../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/collection.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/common.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/date.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/function.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/lang.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - 
"../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/math.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/number.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/object.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/seq.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - 
"../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/string.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/common/util.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/index.d.ts" - ], - "../../../node_modules/@types/lodash/index.d.ts": [ - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts" - ], - "../../../node_modules/@types/node/base.d.ts": [ - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts" - ], - "../../../node_modules/@types/node/child_process.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/cluster.d.ts": [ - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/crypto.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/dgram.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/domain.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/fs.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - 
"../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/http2.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/https.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/url.d.ts" - ], - "../../../node_modules/@types/node/index.d.ts": [ - "../../../node_modules/@types/node/base.d.ts" - ], - "../../../node_modules/@types/node/inspector.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/net.d.ts": [ - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/perf_hooks.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts" - ], - "../../../node_modules/@types/node/process.d.ts": [ - "../../../node_modules/@types/node/tty.d.ts" - ], - "../../../node_modules/@types/node/readline.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/repl.d.ts": [ - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/stream.d.ts": [ - "../../../node_modules/@types/node/events.d.ts" - ], - "../../../node_modules/@types/node/tls.d.ts": [ - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/ts3.3/base.d.ts": [ - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", 
- "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts" - ], - "../../../node_modules/@types/node/ts3.6/base.d.ts": [ - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/wasi.d.ts" - ], - "../../../node_modules/@types/node/tty.d.ts": [ - "../../../node_modules/@types/node/net.d.ts" - ], - "../../../node_modules/@types/node/url.d.ts": [ - "../../../node_modules/@types/node/querystring.d.ts" - ], - "../../../node_modules/@types/node/v8.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/node/worker_threads.d.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/vm.d.ts" - ], - "../../../node_modules/@types/node/zlib.d.ts": [ - "../../../node_modules/@types/node/stream.d.ts" - ], - "../../../node_modules/@types/rimraf/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../../../node_modules/@types/shelljs/index.d.ts": [ - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.macchiato/out/dirent.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.macchiato/out/index.d.ts": [ - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/stats.d.ts" - ], - "../fs.macchiato/out/stats.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../fs.macchiato/out/types.d.ts" - ], - "../fs.scandir/out/adapters/fs.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.scandir/out/index.d.ts": [ - "../fs.scandir/out/adapters/fs.d.ts", - "../fs.scandir/out/providers/async.d.ts", - "../fs.scandir/out/settings.d.ts", - "../fs.scandir/out/types/index.d.ts" - ], - "../fs.scandir/out/providers/async.d.ts": [ - "../../../node_modules/@types/node/index.d.ts", - "../fs.scandir/out/settings.d.ts", - "../fs.scandir/out/types/index.d.ts" - ], - "../fs.scandir/out/settings.d.ts": [ - "../fs.scandir/out/adapters/fs.d.ts", - "../fs.stat/out/index.d.ts" - ], - "../fs.scandir/out/types/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "../fs.stat/out/adapters/fs.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - 
"../../../node_modules/@types/node/index.d.ts" - ], - "../fs.stat/out/index.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts", - "../fs.stat/out/providers/async.d.ts", - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ], - "../fs.stat/out/providers/async.d.ts": [ - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts" - ], - "../fs.stat/out/settings.d.ts": [ - "../fs.stat/out/adapters/fs.d.ts" - ], - "../fs.stat/out/types/index.d.ts": [ - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/index.d.ts" - ], - "./src/index.ts": [ - "../../../node_modules/@types/node/stream.d.ts", - "../fs.scandir/out/index.d.ts", - "./src/providers/async.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/async.ts": [ - "./src/readers/async.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/providers/index.ts": [ - "./src/providers/async.ts", - "./src/providers/stream.ts", - "./src/providers/sync.ts" - ], - "./src/providers/stream.ts": [ - "../../../node_modules/@types/node/stream.d.ts", - "./src/readers/async.ts", - "./src/settings.ts" - ], - "./src/providers/sync.ts": [ - "./src/readers/sync.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/readers/async.ts": [ - "../../../node_modules/@types/node/events.d.ts", - "../fs.scandir/out/index.d.ts", - "./src/readers/reader.ts", - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/readers/common.ts": [ - "./src/settings.ts", - "./src/types/index.ts" - ], - "./src/readers/reader.ts": [ - "./src/settings.ts" - ], - "./src/readers/sync.ts": [ - "../fs.scandir/out/index.d.ts", - "./src/readers/reader.ts", - "./src/types/index.ts" - ], - "./src/settings.ts": [ - "../fs.scandir/out/index.d.ts", - "./src/types/index.ts" - ], - "./src/tests/index.ts": [ - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "./src/types/index.ts" - ], - "./src/types/index.ts": [ - "../fs.scandir/out/index.d.ts" - ] - }, - "semanticDiagnosticsPerFile": [ - "../../../node_modules/@types/eslint-visitor-keys/index.d.ts", - "../../../node_modules/@types/fs-extra/index.d.ts", - "../../../node_modules/@types/glob/index.d.ts", - "../../../node_modules/@types/highlight.js/index.d.ts", - "../../../node_modules/@types/json-schema/index.d.ts", - "../../../node_modules/@types/lodash/common/array.d.ts", - "../../../node_modules/@types/lodash/common/collection.d.ts", - "../../../node_modules/@types/lodash/common/common.d.ts", - "../../../node_modules/@types/lodash/common/date.d.ts", - "../../../node_modules/@types/lodash/common/function.d.ts", - "../../../node_modules/@types/lodash/common/lang.d.ts", - "../../../node_modules/@types/lodash/common/math.d.ts", - "../../../node_modules/@types/lodash/common/number.d.ts", - "../../../node_modules/@types/lodash/common/object.d.ts", - "../../../node_modules/@types/lodash/common/seq.d.ts", - "../../../node_modules/@types/lodash/common/string.d.ts", - "../../../node_modules/@types/lodash/common/util.d.ts", - "../../../node_modules/@types/lodash/index.d.ts", - "../../../node_modules/@types/marked/index.d.ts", - "../../../node_modules/@types/minimatch/index.d.ts", - "../../../node_modules/@types/minimist/index.d.ts", - "../../../node_modules/@types/mocha/index.d.ts", - "../../../node_modules/@types/node/assert.d.ts", - "../../../node_modules/@types/node/async_hooks.d.ts", - "../../../node_modules/@types/node/base.d.ts", - "../../../node_modules/@types/node/buffer.d.ts", - "../../../node_modules/@types/node/child_process.d.ts", - 
"../../../node_modules/@types/node/cluster.d.ts", - "../../../node_modules/@types/node/console.d.ts", - "../../../node_modules/@types/node/constants.d.ts", - "../../../node_modules/@types/node/crypto.d.ts", - "../../../node_modules/@types/node/dgram.d.ts", - "../../../node_modules/@types/node/dns.d.ts", - "../../../node_modules/@types/node/domain.d.ts", - "../../../node_modules/@types/node/events.d.ts", - "../../../node_modules/@types/node/fs.d.ts", - "../../../node_modules/@types/node/globals.d.ts", - "../../../node_modules/@types/node/globals.global.d.ts", - "../../../node_modules/@types/node/http.d.ts", - "../../../node_modules/@types/node/http2.d.ts", - "../../../node_modules/@types/node/https.d.ts", - "../../../node_modules/@types/node/index.d.ts", - "../../../node_modules/@types/node/inspector.d.ts", - "../../../node_modules/@types/node/module.d.ts", - "../../../node_modules/@types/node/net.d.ts", - "../../../node_modules/@types/node/os.d.ts", - "../../../node_modules/@types/node/path.d.ts", - "../../../node_modules/@types/node/perf_hooks.d.ts", - "../../../node_modules/@types/node/process.d.ts", - "../../../node_modules/@types/node/punycode.d.ts", - "../../../node_modules/@types/node/querystring.d.ts", - "../../../node_modules/@types/node/readline.d.ts", - "../../../node_modules/@types/node/repl.d.ts", - "../../../node_modules/@types/node/stream.d.ts", - "../../../node_modules/@types/node/string_decoder.d.ts", - "../../../node_modules/@types/node/timers.d.ts", - "../../../node_modules/@types/node/tls.d.ts", - "../../../node_modules/@types/node/trace_events.d.ts", - "../../../node_modules/@types/node/ts3.3/base.d.ts", - "../../../node_modules/@types/node/ts3.6/base.d.ts", - "../../../node_modules/@types/node/tty.d.ts", - "../../../node_modules/@types/node/url.d.ts", - "../../../node_modules/@types/node/util.d.ts", - "../../../node_modules/@types/node/v8.d.ts", - "../../../node_modules/@types/node/vm.d.ts", - "../../../node_modules/@types/node/wasi.d.ts", - "../../../node_modules/@types/node/worker_threads.d.ts", - "../../../node_modules/@types/node/zlib.d.ts", - "../../../node_modules/@types/normalize-package-data/index.d.ts", - "../../../node_modules/@types/rimraf/index.d.ts", - "../../../node_modules/@types/run-parallel/index.d.ts", - "../../../node_modules/@types/shelljs/index.d.ts", - "../../../node_modules/@types/sinon/ts3.1/index.d.ts", - "../../../node_modules/fastq/index.d.ts", - "../../../node_modules/typescript/lib/lib.dom.d.ts", - "../../../node_modules/typescript/lib/lib.dom.iterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.collection.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.core.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.generator.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.iterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.promise.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.proxy.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.reflect.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.symbol.d.ts", - "../../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts", - "../../../node_modules/typescript/lib/lib.es2016.array.include.d.ts", - "../../../node_modules/typescript/lib/lib.es2016.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.full.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.intl.d.ts", - 
"../../../node_modules/typescript/lib/lib.es2017.object.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.string.d.ts", - "../../../node_modules/typescript/lib/lib.es2017.typedarrays.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.asynciterable.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.intl.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.promise.d.ts", - "../../../node_modules/typescript/lib/lib.es2018.regexp.d.ts", - "../../../node_modules/typescript/lib/lib.es2020.bigint.d.ts", - "../../../node_modules/typescript/lib/lib.es5.d.ts", - "../../../node_modules/typescript/lib/lib.esnext.intl.d.ts", - "../../../node_modules/typescript/lib/lib.scripthost.d.ts", - "../../../node_modules/typescript/lib/lib.webworker.importscripts.d.ts", - "../fs.macchiato/out/dirent.d.ts", - "../fs.macchiato/out/index.d.ts", - "../fs.macchiato/out/stats.d.ts", - "../fs.macchiato/out/types.d.ts", - "../fs.scandir/out/adapters/fs.d.ts", - "../fs.scandir/out/index.d.ts", - "../fs.scandir/out/providers/async.d.ts", - "../fs.scandir/out/settings.d.ts", - "../fs.scandir/out/types/index.d.ts", - "../fs.stat/out/adapters/fs.d.ts", - "../fs.stat/out/index.d.ts", - "../fs.stat/out/providers/async.d.ts", - "../fs.stat/out/settings.d.ts", - "../fs.stat/out/types/index.d.ts", - "./src/index.spec.ts", - "./src/index.ts", - "./src/providers/async.spec.ts", - "./src/providers/async.ts", - "./src/providers/index.ts", - "./src/providers/stream.spec.ts", - "./src/providers/stream.ts", - "./src/providers/sync.spec.ts", - "./src/providers/sync.ts", - "./src/readers/async.spec.ts", - "./src/readers/async.ts", - "./src/readers/common.spec.ts", - "./src/readers/common.ts", - "./src/readers/reader.spec.ts", - "./src/readers/reader.ts", - "./src/readers/sync.spec.ts", - "./src/readers/sync.ts", - "./src/settings.spec.ts", - "./src/settings.ts", - "./src/tests/index.ts", - "./src/types/index.ts" - ] - }, - "version": "3.9.7" -} \ No newline at end of file diff --git a/node_modules/@octokit/core/README.md b/node_modules/@octokit/core/README.md index 0372f459..129d6ce0 100644 --- a/node_modules/@octokit/core/README.md +++ b/node_modules/@octokit/core/README.md @@ -360,7 +360,7 @@ octokit.hook.after("request", async (response, options) => { }); octokit.hook.error("request", async (error, options) => { if (error.status === 304) { - return findInCache(error.headers.etag); + return findInCache(error.response.headers.etag); } throw error; diff --git a/node_modules/@octokit/core/dist-node/index.js b/node_modules/@octokit/core/dist-node/index.js index 62983432..5ad37e1b 100644 --- a/node_modules/@octokit/core/dist-node/index.js +++ b/node_modules/@octokit/core/dist-node/index.js @@ -44,8 +44,9 @@ function _objectWithoutProperties(source, excluded) { return target; } -const VERSION = "3.4.0"; +const VERSION = "3.5.1"; +const _excluded = ["authStrategy"]; class Octokit { constructor(options = {}) { const hook = new beforeAfterHook.Collection(); @@ -107,7 +108,7 @@ class Octokit { const { authStrategy } = options, - otherOptions = _objectWithoutProperties(options, ["authStrategy"]); + otherOptions = _objectWithoutProperties(options, _excluded); const auth = authStrategy(Object.assign({ request: this.request, diff --git a/node_modules/@octokit/core/dist-node/index.js.map 
b/node_modules/@octokit/core/dist-node/index.js.map index 4c344365..19485f9f 100644 --- a/node_modules/@octokit/core/dist-node/index.js.map +++ b/node_modules/@octokit/core/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"3.4.0\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nexport class Octokit {\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\"),\n }),\n mediaType: {\n previews: [],\n format: \"\",\n },\n };\n // prepend default user agent with `options.userAgent` if set\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`,\n ]\n .filter(Boolean)\n .join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n }, options.log);\n this.hook = hook;\n // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\",\n });\n }\n else {\n // (2)\n const auth = createTokenAuth(options.auth);\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n }\n else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions,\n }, options.auth));\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n // apply plugins\n // https://stackoverflow.com/a/16345172\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent\n ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`,\n }\n : null));\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n },\n _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))),\n _a);\n return NewOctokit;\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n"],"names":["VERSION","Octokit","constructor","options","hook","Collection","requestDefaults","baseUrl","request","endpoint","DEFAULTS","headers","Object","assign","bind","mediaType","previews","format","userAgent","getUserAgent","filter","Boolean","join","timeZone","defaults","graphql","withCustomRequest","log","debug","info","warn","console","error","authStrategy","auth","type","createTokenAuth","wrap","otherOptions","octokit","octokitOptions","classConstructor","plugins","forEach","plugin","OctokitWithDefaults","args","newPlugins","_a","currentPlugins","NewOctokit","concat","includes"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACMA,MAAMC,OAAN,CAAc;AACjBC,EAAAA,WAAW,CAACC,OAAO,GAAG,EAAX,EAAe;AACtB,UAAMC,IAAI,GAAG,IAAIC,0BAAJ,EAAb;AACA,UAAMC,eAAe,GAAG;AACpBC,MAAAA,OAAO,EAAEC,eAAO,CAACC,QAAR,CAAiBC,QAAjB,CAA0BH,OADf;AAEpBI,MAAAA,OAAO,EAAE,EAFW;AAGpBH,MAAAA,OAAO,EAAEI,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBV,OAAO,CAACK,OAA1B,EAAmC;AACxC;AACAJ,QAAAA,IAAI,EAAEA,IAAI,CAACU,IAAL,CAAU,IAAV,EAAgB,SAAhB;AAFkC,OAAnC,CAHW;AAOpBC,MAAAA,SAAS,EAAE;AACPC,QAAAA,QAAQ,EAAE,EADH;AAEPC,QAAAA,MAAM,EAAE;AAFD;AAPS,KAAxB,CAFsB;;AAetBX,IAAAA,eAAe,CAACK,OAAhB,CAAwB,YAAxB,IAAwC,CACpCR,OAAO,CAACe,SAD4B,EAEnC,mBAAkBlB,OAAQ,IAAGmB,+BAAY,EAAG,EAFT,EAInCC,MAJmC,CAI5BC,OAJ4B,EAKnCC,IALmC,CAK9B,GAL8B,CAAxC;;AAMA,QAAInB,OAAO,CAACI,OAAZ,EAAqB;AACjBD,MAAAA,eAAe,CAACC,OAAhB,GAA0BJ,OAAO,CAACI,OAAlC;AACH;;AACD,QAAIJ,OAAO,CAACa,QAAZ,EAAsB;AAClBV,MAAAA,eAAe,CAACS,SAAhB,CAA0BC,QAA1B,GAAqCb,OAAO,CAACa,QAA7C;AACH;;AACD,QAAIb,OAAO,CAACoB,QAAZ,EAAsB;AAClBjB,MAAAA,eAAe,CAACK,OAAhB,CAAwB,WAAxB,IAAuCR,OAAO,CAACoB,QAA/C;AACH;;AACD,SAAKf,OAAL,GAAeA,eAAO,CAACgB,QAAR,CAAiBlB,eAAjB,CAAf;AACA,SAAKmB,OAAL,GAAeC,yBAAiB,CAAC,KAAKlB,OAAN,CAAjB,CAAgCgB,QAAhC,CAAyClB,eAAzC,CAAf;AACA,SAAKqB,GAAL,GAAWf,MAAM,CAACC,MAAP,CAAc;AACrBe,MAAAA,KAAK,EAAE,MAAM,EADQ;AAErBC,MAAAA,IAAI,EAAE,MAAM,EAFS;AAGrBC,MAAAA,IAAI,EAAEC,OAAO,CAACD,IAAR,CAAahB,IAAb,CAAkBiB,OAAlB,CAHe;AAIrBC,MAAAA,KAAK,EAAED,OAAO,CAACC,KAAR,CAAclB,IAAd,CAAmBiB,OAAnB;AAJc,KAAd,EAKR5B,OAAO,CAACwB,GALA,CAAX;AAMA,SAAKvB,IAAL,GAAYA,IAAZ,CAtCsB;AAwCtB;AACA;AACA;AACA;;AACA,QAAI
,CAACD,OAAO,CAAC8B,YAAb,EAA2B;AACvB,UAAI,CAAC9B,OAAO,CAAC+B,IAAb,EAAmB;AACf;AACA,aAAKA,IAAL,GAAY,aAAa;AACrBC,UAAAA,IAAI,EAAE;AADe,SAAb,CAAZ;AAGH,OALD,MAMK;AACD;AACA,cAAMD,IAAI,GAAGE,yBAAe,CAACjC,OAAO,CAAC+B,IAAT,CAA5B,CAFC;;AAID9B,QAAAA,IAAI,CAACiC,IAAL,CAAU,SAAV,EAAqBH,IAAI,CAAC9B,IAA1B;AACA,aAAK8B,IAAL,GAAYA,IAAZ;AACH;AACJ,KAdD,MAeK;AACD,YAAM;AAAED,QAAAA;AAAF,UAAoC9B,OAA1C;AAAA,YAAyBmC,YAAzB,4BAA0CnC,OAA1C;;AACA,YAAM+B,IAAI,GAAGD,YAAY,CAACrB,MAAM,CAACC,MAAP,CAAc;AACpCL,QAAAA,OAAO,EAAE,KAAKA,OADsB;AAEpCmB,QAAAA,GAAG,EAAE,KAAKA,GAF0B;AAGpC;AACA;AACA;AACA;AACA;AACAY,QAAAA,OAAO,EAAE,IAR2B;AASpCC,QAAAA,cAAc,EAAEF;AAToB,OAAd,EAUvBnC,OAAO,CAAC+B,IAVe,CAAD,CAAzB,CAFC;;AAcD9B,MAAAA,IAAI,CAACiC,IAAL,CAAU,SAAV,EAAqBH,IAAI,CAAC9B,IAA1B;AACA,WAAK8B,IAAL,GAAYA,IAAZ;AACH,KA3EqB;AA6EtB;;;AACA,UAAMO,gBAAgB,GAAG,KAAKvC,WAA9B;AACAuC,IAAAA,gBAAgB,CAACC,OAAjB,CAAyBC,OAAzB,CAAkCC,MAAD,IAAY;AACzChC,MAAAA,MAAM,CAACC,MAAP,CAAc,IAAd,EAAoB+B,MAAM,CAAC,IAAD,EAAOzC,OAAP,CAA1B;AACH,KAFD;AAGH;;AACc,SAARqB,QAAQ,CAACA,QAAD,EAAW;AACtB,UAAMqB,mBAAmB,GAAG,cAAc,IAAd,CAAmB;AAC3C3C,MAAAA,WAAW,CAAC,GAAG4C,IAAJ,EAAU;AACjB,cAAM3C,OAAO,GAAG2C,IAAI,CAAC,CAAD,CAAJ,IAAW,EAA3B;;AACA,YAAI,OAAOtB,QAAP,KAAoB,UAAxB,EAAoC;AAChC,gBAAMA,QAAQ,CAACrB,OAAD,CAAd;AACA;AACH;;AACD,cAAMS,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBW,QAAlB,EAA4BrB,OAA5B,EAAqCA,OAAO,CAACe,SAAR,IAAqBM,QAAQ,CAACN,SAA9B,GACrC;AACEA,UAAAA,SAAS,EAAG,GAAEf,OAAO,CAACe,SAAU,IAAGM,QAAQ,CAACN,SAAU;AADxD,SADqC,GAIrC,IAJA,CAAN;AAKH;;AAZ0C,KAA/C;AAcA,WAAO2B,mBAAP;AACH;AACD;AACJ;AACA;AACA;AACA;AACA;;;AACiB,SAAND,MAAM,CAAC,GAAGG,UAAJ,EAAgB;AACzB,QAAIC,EAAJ;;AACA,UAAMC,cAAc,GAAG,KAAKP,OAA5B;AACA,UAAMQ,UAAU,IAAIF,EAAE,GAAG,cAAc,IAAd,CAAmB,EAAxB,EAEhBA,EAAE,CAACN,OAAH,GAAaO,cAAc,CAACE,MAAf,CAAsBJ,UAAU,CAAC3B,MAAX,CAAmBwB,MAAD,IAAY,CAACK,cAAc,CAACG,QAAf,CAAwBR,MAAxB,CAA/B,CAAtB,CAFG,EAGhBI,EAHY,CAAhB;AAIA,WAAOE,UAAP;AACH;;AAnHgB;AAqHrBjD,OAAO,CAACD,OAAR,GAAkBA,OAAlB;AACAC,OAAO,CAACyC,OAAR,GAAkB,EAAlB;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"3.5.1\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nexport class Octokit {\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\"),\n }),\n mediaType: {\n previews: [],\n format: \"\",\n },\n };\n // prepend default user agent with `options.userAgent` if set\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`,\n ]\n .filter(Boolean)\n .join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n }, 
options.log);\n this.hook = hook;\n // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\",\n });\n }\n else {\n // (2)\n const auth = createTokenAuth(options.auth);\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n }\n else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions,\n }, options.auth));\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n // apply plugins\n // https://stackoverflow.com/a/16345172\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent\n ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`,\n }\n : null));\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n },\n _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))),\n _a);\n return NewOctokit;\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n"],"names":["VERSION","Octokit","constructor","options","hook","Collection","requestDefaults","baseUrl","request","endpoint","DEFAULTS","headers","Object","assign","bind","mediaType","previews","format","userAgent","getUserAgent","filter","Boolean","join","timeZone","defaults","graphql","withCustomRequest","log","debug","info","warn","console","error","authStrategy","auth","type","createTokenAuth","wrap","otherOptions","octokit","octokitOptions","classConstructor","plugins","forEach","plugin","OctokitWithDefaults","args","newPlugins","_a","currentPlugins","NewOctokit","concat","includes"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;;ACAP,AAMO,MAAMC,OAAN,CAAc;AACjBC,EAAAA,WAAW,CAACC,OAAO,GAAG,EAAX,EAAe;AACtB,UAAMC,IAAI,GAAG,IAAIC,0BAAJ,EAAb;AACA,UAAMC,eAAe,GAAG;AACpBC,MAAAA,OAAO,EAAEC,eAAO,CAACC,QAAR,CAAiBC,QAAjB,CAA0BH,OADf;AAEpBI,MAAAA,OAAO,EAAE,EAFW;AAGpBH,MAAAA,OAAO,EAAEI,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBV,OAAO,CAACK,OAA1B,EAAmC;AACxC;AACAJ,QAAAA,IAAI,EAAEA,IAAI,CAACU,IAAL,CAAU,IAAV,EAAgB,SAAhB;AAFkC,OAAnC,CAHW;AAOpBC,MAAAA,SAAS,EAAE;AACPC,QAAAA,QAAQ,EAAE,EADH;AAEPC,QAAAA,MAAM,EAAE;AAFD;AAPS,KAAxB,CAFsB;;AAetBX,IAAAA,eAAe,CAACK,OAAhB,CAAwB,YAAxB,IAAwC,CACpCR,OAAO,CAACe,SAD4B,EAEnC,mBAAkBlB,OAAQ,IAAGmB,+BAAY,EAAG,EAFT,EAInCC,MAJmC,CAI5BC,OAJ4B,EAKnCC,IALmC,CAK9B,GAL8B,CAAxC;;AAMA,QAAInB,OAAO,CAACI,OAAZ,EAAqB;AACjBD,MAAAA,eAAe,CAACC,OAAhB,GAA0BJ,OAAO,CAACI,OAAlC;AACH;;AACD,QAAIJ,OAAO,CAACa,QAAZ,EAAsB;AAClBV,MAAAA,eAAe,CAACS,SAAhB,CAA0BC,QAA1B,GAAqCb,OAAO,CAACa,QAA7C;AACH;;AACD,QAAIb,OAAO,CAACoB,QAAZ,EAAsB;AAClBjB,MAAAA,eAAe,CAACK,OAAhB,CAAwB,WAAxB,IAAuCR,OAAO,CAACoB,QAA/C;AACH;;AACD,SAAKf,OAAL,GAAeA,eAAO,CAACgB,QAAR,CAAiBlB,eAAjB,CAAf;AACA,SAAKmB,OAAL,GAAeC,yBAAiB,CAAC,KAAKlB,OAAN,CAAjB,CAAgCgB,QAAhC,CAAyClB,eAAzC,CAAf;AACA,SAAKqB,GAAL,GAAWf,MAAM,CAACC,MAAP,CAAc;AACrBe,MAAAA,KAAK,EAAE,MAAM,EADQ;AAErBC,MAAAA,IAAI,EAAE,MAAM,EAFS;AAGrBC,MAAAA,IAAI,EAAEC,OAAO,CAACD,IAAR,CAAahB,IAAb,CAAkBiB,OAAlB,CAHe;AAIrBC,MAAAA,KAAK,EAAED,OAAO,CAACC,KAAR,CAAclB,IAAd,CAAmBiB,OAAnB;AAJc,KAAd,EAKR5B,OAAO,CAACwB,GALA,CAAX;AAMA,SAAKvB,IAAL,GAAYA,IAAZ,CAtCsB;AAwCtB;AACA;AACA;AACA;;AACA,QAAI,CAACD,OAAO,CAAC8B,YAAb,EAA2B;AACvB,UAAI,CAAC9B,OAAO,CAAC+B,IAAb,EAAmB;AACf;AACA,aAAKA,IAAL,GAAY,aAAa;AACrBC,UAAAA,IAAI,EAAE;AADe,SAAb,CAAZ;AAGH,OALD,MAMK;AACD;AACA,cAAMD,IAAI,GAAGE,yBAAe,CAACjC,OAAO,CAAC+B,IAAT,CAA5B,CAFC;;AAID9B,QAAAA,IAAI,CAACiC,IAAL,CAAU,SAAV,EAAqBH,IAAI,CAAC9B,IAA1B;AACA,aAAK8B,IAAL,GAAYA,IAAZ;AACH;AACJ,KAdD,MAeK;AACD,YAAM;AAAED,QAAAA;AAAF,UAAoC9B,OAA1C;AAAA,YAAyBmC,YAAzB,4BAA0CnC,OAA1C;;AACA,YAAM+B,IAAI,GAAGD,YAAY,CAACrB,MAAM,CAACC,MAAP,CAAc;AACpCL,QAAAA,OAAO,EAAE,KAAKA,OADsB;AAEpCmB,QAAAA,GAAG,EAAE,KAAKA,GAF0B;AAGpC;AACA;AACA;AACA;AACA;AACAY,QAAAA,OAAO,EAAE,IAR2B;AASpCC,QAAAA,cAAc,EAAEF;AAToB,OAAd,EAUvBnC,OAAO,CAAC+B,IAVe,CAAD,CAAzB,CAFC;;AAcD9B,MAAAA,IAAI,CAACiC,IAAL,CAAU,SAAV,EAAqBH,IAAI,CAAC9B,IAA1B;AACA,WAAK8B,IAAL,GAAYA,IAAZ;AACH,KA3EqB;AA6EtB;;;AACA,UAAMO,gBAAgB,GAAG,KAAKvC,WAA9
B;AACAuC,IAAAA,gBAAgB,CAACC,OAAjB,CAAyBC,OAAzB,CAAkCC,MAAD,IAAY;AACzChC,MAAAA,MAAM,CAACC,MAAP,CAAc,IAAd,EAAoB+B,MAAM,CAAC,IAAD,EAAOzC,OAAP,CAA1B;AACH,KAFD;AAGH;;AACc,SAARqB,QAAQ,CAACA,QAAD,EAAW;AACtB,UAAMqB,mBAAmB,GAAG,cAAc,IAAd,CAAmB;AAC3C3C,MAAAA,WAAW,CAAC,GAAG4C,IAAJ,EAAU;AACjB,cAAM3C,OAAO,GAAG2C,IAAI,CAAC,CAAD,CAAJ,IAAW,EAA3B;;AACA,YAAI,OAAOtB,QAAP,KAAoB,UAAxB,EAAoC;AAChC,gBAAMA,QAAQ,CAACrB,OAAD,CAAd;AACA;AACH;;AACD,cAAMS,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBW,QAAlB,EAA4BrB,OAA5B,EAAqCA,OAAO,CAACe,SAAR,IAAqBM,QAAQ,CAACN,SAA9B,GACrC;AACEA,UAAAA,SAAS,EAAG,GAAEf,OAAO,CAACe,SAAU,IAAGM,QAAQ,CAACN,SAAU;AADxD,SADqC,GAIrC,IAJA,CAAN;AAKH;;AAZ0C,KAA/C;AAcA,WAAO2B,mBAAP;AACH;AACD;AACJ;AACA;AACA;AACA;AACA;;;AACiB,SAAND,MAAM,CAAC,GAAGG,UAAJ,EAAgB;AACzB,QAAIC,EAAJ;;AACA,UAAMC,cAAc,GAAG,KAAKP,OAA5B;AACA,UAAMQ,UAAU,IAAIF,EAAE,GAAG,cAAc,IAAd,CAAmB,EAAxB,EAEhBA,EAAE,CAACN,OAAH,GAAaO,cAAc,CAACE,MAAf,CAAsBJ,UAAU,CAAC3B,MAAX,CAAmBwB,MAAD,IAAY,CAACK,cAAc,CAACG,QAAf,CAAwBR,MAAxB,CAA/B,CAAtB,CAFG,EAGhBI,EAHY,CAAhB;AAIA,WAAOE,UAAP;AACH;;AAnHgB;AAqHrBjD,OAAO,CAACD,OAAR,GAAkBA,OAAlB;AACAC,OAAO,CAACyC,OAAR,GAAkB,EAAlB;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/core/dist-src/version.js b/node_modules/@octokit/core/dist-src/version.js index 6c03212c..f53b6355 100644 --- a/node_modules/@octokit/core/dist-src/version.js +++ b/node_modules/@octokit/core/dist-src/version.js @@ -1 +1 @@ -export const VERSION = "3.4.0"; +export const VERSION = "3.5.1"; diff --git a/node_modules/@octokit/core/dist-types/types.d.ts b/node_modules/@octokit/core/dist-types/types.d.ts index 86ae62d8..06c0f7a2 100644 --- a/node_modules/@octokit/core/dist-types/types.d.ts +++ b/node_modules/@octokit/core/dist-types/types.d.ts @@ -19,7 +19,7 @@ export declare type OctokitOptions = { [option: string]: any; }; export declare type Constructor = new (...args: any[]) => T; -export declare type ReturnTypeOf = T extends AnyFunction ? ReturnType : T extends AnyFunction[] ? UnionToIntersection> : never; +export declare type ReturnTypeOf = T extends AnyFunction ? ReturnType : T extends AnyFunction[] ? 
UnionToIntersection, void>> : never; /** * @author https://stackoverflow.com/users/2887218/jcalz * @see https://stackoverflow.com/a/50375286/10325032 diff --git a/node_modules/@octokit/core/dist-types/version.d.ts b/node_modules/@octokit/core/dist-types/version.d.ts index d19afac4..8bb7c7ff 100644 --- a/node_modules/@octokit/core/dist-types/version.d.ts +++ b/node_modules/@octokit/core/dist-types/version.d.ts @@ -1 +1 @@ -export declare const VERSION = "3.4.0"; +export declare const VERSION = "3.5.1"; diff --git a/node_modules/@octokit/core/dist-web/index.js b/node_modules/@octokit/core/dist-web/index.js index 08ecf0be..2828d970 100644 --- a/node_modules/@octokit/core/dist-web/index.js +++ b/node_modules/@octokit/core/dist-web/index.js @@ -4,7 +4,7 @@ import { request } from '@octokit/request'; import { withCustomRequest } from '@octokit/graphql'; import { createTokenAuth } from '@octokit/auth-token'; -const VERSION = "3.4.0"; +const VERSION = "3.5.1"; class Octokit { constructor(options = {}) { diff --git a/node_modules/@octokit/core/dist-web/index.js.map b/node_modules/@octokit/core/dist-web/index.js.map index b5f8f91e..084574c6 100644 --- a/node_modules/@octokit/core/dist-web/index.js.map +++ b/node_modules/@octokit/core/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"3.4.0\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nexport class Octokit {\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\"),\n }),\n mediaType: {\n previews: [],\n format: \"\",\n },\n };\n // prepend default user agent with `options.userAgent` if set\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`,\n ]\n .filter(Boolean)\n .join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n }, options.log);\n this.hook = hook;\n // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. 
Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\",\n });\n }\n else {\n // (2)\n const auth = createTokenAuth(options.auth);\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n }\n else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions,\n }, options.auth));\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n // apply plugins\n // https://stackoverflow.com/a/16345172\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent\n ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`,\n }\n : null));\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n },\n _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))),\n _a);\n return NewOctokit;\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = 
[];\n"],"names":[],"mappings":";;;;;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACMnC,MAAM,OAAO,CAAC;AACrB,IAAI,WAAW,CAAC,OAAO,GAAG,EAAE,EAAE;AAC9B,QAAQ,MAAM,IAAI,GAAG,IAAI,UAAU,EAAE,CAAC;AACtC,QAAQ,MAAM,eAAe,GAAG;AAChC,YAAY,OAAO,EAAE,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO;AACtD,YAAY,OAAO,EAAE,EAAE;AACvB,YAAY,OAAO,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,EAAE;AACxD;AACA,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,SAAS,CAAC;AAChD,aAAa,CAAC;AACd,YAAY,SAAS,EAAE;AACvB,gBAAgB,QAAQ,EAAE,EAAE;AAC5B,gBAAgB,MAAM,EAAE,EAAE;AAC1B,aAAa;AACb,SAAS,CAAC;AACV;AACA,QAAQ,eAAe,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG;AAChD,YAAY,OAAO,CAAC,SAAS;AAC7B,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1D,SAAS;AACT,aAAa,MAAM,CAAC,OAAO,CAAC;AAC5B,aAAa,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,QAAQ,IAAI,OAAO,CAAC,OAAO,EAAE;AAC7B,YAAY,eAAe,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACtD,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,QAAQ,EAAE;AAC9B,YAAY,eAAe,CAAC,SAAS,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AAClE,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,QAAQ,EAAE;AAC9B,YAAY,eAAe,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,OAAO,CAAC,QAAQ,CAAC;AACpE,SAAS;AACT,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;AACzD,QAAQ,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;AACjF,QAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,CAAC;AACjC,YAAY,KAAK,EAAE,MAAM,GAAG;AAC5B,YAAY,IAAI,EAAE,MAAM,GAAG;AAC3B,YAAY,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC;AAC5C,YAAY,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC;AAC9C,SAAS,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;AACxB,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACzB;AACA;AACA;AACA;AACA;AACA,QAAQ,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;AACnC,YAAY,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;AAC/B;AACA,gBAAgB,IAAI,CAAC,IAAI,GAAG,aAAa;AACzC,oBAAoB,IAAI,EAAE,iBAAiB;AAC3C,iBAAiB,CAAC,CAAC;AACnB,aAAa;AACb,iBAAiB;AACjB;AACA,gBAAgB,MAAM,IAAI,GAAG,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC3D;AACA,gBAAgB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;AAChD,gBAAgB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjC,aAAa;AACb,SAAS;AACT,aAAa;AACb,YAAY,MAAM,EAAE,YAAY,EAAE,GAAG,YAAY,EAAE,GAAG,OAAO,CAAC;AAC9D,YAAY,MAAM,IAAI,GAAG,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC;AACpD,gBAAgB,OAAO,EAAE,IAAI,CAAC,OAAO;AACrC,gBAAgB,GAAG,EAAE,IAAI,CAAC,GAAG;AAC7B;AACA;AACA;AACA;AACA;AACA,gBAAgB,OAAO,EAAE,IAAI;AAC7B,gBAAgB,cAAc,EAAE,YAAY;AAC5C,aAAa,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9B;AACA,YAAY,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5C,YAAY,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AAC7B,SAAS;AACT;AACA;AACA,QAAQ,MAAM,gBAAgB,GAAG,IAAI,CAAC,WAAW,CAAC;AAClD,QAAQ,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,MAAM,KAAK;AACrD,YAAY,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;AACvD,SAAS,CAAC,CAAC;AACX,KAAK;AACL,IAAI,OAAO,QAAQ,CAAC,QAAQ,EAAE;AAC9B,QAAQ,MAAM,mBAAmB,GAAG,cAAc,IAAI,CAAC;AACvD,YAAY,WAAW,CAAC,GAAG,IAAI,EAAE;AACjC,gBAAgB,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;AAC9C,gBAAgB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;AACpD,oBAAoB,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;AAC7C,oBAAoB,OAAO;AAC3B,iBAAiB;AACjB,gBAAgB,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,IAAI,QAAQ,CAAC,SAAS;AAClG,sBAAsB;AACtB,wBAAwB,SAAS,EAAE,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;AAC/E,qBAAqB;AACrB,sBAAsB,IAAI,CAAC,CAAC,CAAC;AAC7B,aAAa;AACb,SAAS,CAAC;AACV,QAAQ,OAAO,mBAAmB,CAAC;AACnC,KAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,OAAO,MAAM,CAAC,GAAG,UAAU,EAAE;AACjC,QAAQ,IAAI,EAAE,CAAC;AACf,QAAQ,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CAAC;AAC5C,QAAQ,MAAM,UAAU,IAAI,EAAE,GAAG,cAAc,IAAI,CAAC;AACpD,aAAa;AACb,YAAY,EAAE,CAAC,OAAO,GAAG,cAAc,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAA
C,CAAC,MAAM,KAAK,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC/G,YAAY,EAAE,CAAC,CAAC;AAChB,QAAQ,OAAO,UAAU,CAAC;AAC1B,KAAK;AACL,CAAC;AACD,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC;AAC1B,OAAO,CAAC,OAAO,GAAG,EAAE,CAAC;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"3.5.1\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nexport class Octokit {\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\"),\n }),\n mediaType: {\n previews: [],\n format: \"\",\n },\n };\n // prepend default user agent with `options.userAgent` if set\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`,\n ]\n .filter(Boolean)\n .join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n }, options.log);\n this.hook = hook;\n // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\",\n });\n }\n else {\n // (2)\n const auth = createTokenAuth(options.auth);\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n }\n else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions,\n }, options.auth));\n // @ts-ignore ¯\\_(ツ)_/¯\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n // apply plugins\n // https://stackoverflow.com/a/16345172\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent\n ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`,\n }\n : null));\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n },\n _a.plugins = currentPlugins.concat(newPlugins.filter((plugin) => !currentPlugins.includes(plugin))),\n _a);\n return NewOctokit;\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n"],"names":[],"mappings":";;;;;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACMnC,MAAM,OAAO,CAAC;AACrB,IAAI,WAAW,CAAC,OAAO,GAAG,EAAE,EAAE;AAC9B,QAAQ,MAAM,IAAI,GAAG,IAAI,UAAU,EAAE,CAAC;AACtC,QAAQ,MAAM,eAAe,GAAG;AAChC,YAAY,OAAO,EAAE,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO;AACtD,YAAY,OAAO,EAAE,EAAE;AACvB,YAAY,OAAO,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,EAAE;AACxD;AACA,gBAAgB,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,SAAS,CAAC;AAChD,aAAa,CAAC;AACd,YAAY,SAAS,EAAE;AACvB,gBAAgB,QAAQ,EAAE,EAAE;AAC5B,gBAAgB,MAAM,EAAE,EAAE;AAC1B,aAAa;AACb,SAAS,CAAC;AACV;AACA,QAAQ,eAAe,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG;AAChD,YAAY,OAAO,CAAC,SAAS;AAC7B,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1D,SAAS;AACT,aAAa,MAAM,CAAC,OAAO,CAAC;AAC5B,aAAa,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,QAAQ,IAAI,OAAO,CAAC,OAAO,EAAE;AAC7B,YAAY,eAAe,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACtD,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,QAAQ,EAAE;AAC9B,YAAY,eAAe,CAAC,SAAS,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AAClE,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,QAAQ,EAAE;AAC9B,YAAY,eAAe,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,OAAO,CAAC,QAAQ,CAAC;AACpE,SAAS;AACT,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;AACzD,QAAQ,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;AACjF,QAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,CAAC;AACjC,YAAY,KAAK,EAAE,MAAM,GAAG;AAC5B,YAAY,IAAI,EAAE,MAAM,GAAG;AAC3B,YAAY,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC;AAC5C,YAAY,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC;AAC9C,SAAS,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;AACxB,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACzB;AACA;AACA;AACA;AACA;AACA,QAAQ,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;AACnC,YAAY,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;AAC/B;AACA,gBAAgB,IAAI,CAAC,IAAI,GAAG,aAAa;AACzC,oBAAoB,IAAI,EAAE,iBAAiB;AAC3C,iBAAiB,CAAC,CAAC;AACnB,aAAa;AACb,iBAAiB;AACjB;AACA,gBAAgB,MAAM,IAAI,GAAG,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC3D;AACA,gBAAgB,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;AAChD,gBAAgB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjC,aAAa;AACb,SAAS;AACT,aAAa;AACb,YAAY,MAAM,EAAE,YAAY,EAAE,GAAG,YAAY,EAAE,GAAG,OAAO,CAAC;AAC9D,YAAY,MAAM,IAAI,GAAG,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC;AACpD,gBAAgB,OAAO,EAAE,IAAI,CAAC,OA
AO;AACrC,gBAAgB,GAAG,EAAE,IAAI,CAAC,GAAG;AAC7B;AACA;AACA;AACA;AACA;AACA,gBAAgB,OAAO,EAAE,IAAI;AAC7B,gBAAgB,cAAc,EAAE,YAAY;AAC5C,aAAa,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9B;AACA,YAAY,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5C,YAAY,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AAC7B,SAAS;AACT;AACA;AACA,QAAQ,MAAM,gBAAgB,GAAG,IAAI,CAAC,WAAW,CAAC;AAClD,QAAQ,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,MAAM,KAAK;AACrD,YAAY,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;AACvD,SAAS,CAAC,CAAC;AACX,KAAK;AACL,IAAI,OAAO,QAAQ,CAAC,QAAQ,EAAE;AAC9B,QAAQ,MAAM,mBAAmB,GAAG,cAAc,IAAI,CAAC;AACvD,YAAY,WAAW,CAAC,GAAG,IAAI,EAAE;AACjC,gBAAgB,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;AAC9C,gBAAgB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;AACpD,oBAAoB,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;AAC7C,oBAAoB,OAAO;AAC3B,iBAAiB;AACjB,gBAAgB,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,IAAI,QAAQ,CAAC,SAAS;AAClG,sBAAsB;AACtB,wBAAwB,SAAS,EAAE,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC;AAC/E,qBAAqB;AACrB,sBAAsB,IAAI,CAAC,CAAC,CAAC;AAC7B,aAAa;AACb,SAAS,CAAC;AACV,QAAQ,OAAO,mBAAmB,CAAC;AACnC,KAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,OAAO,MAAM,CAAC,GAAG,UAAU,EAAE;AACjC,QAAQ,IAAI,EAAE,CAAC;AACf,QAAQ,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CAAC;AAC5C,QAAQ,MAAM,UAAU,IAAI,EAAE,GAAG,cAAc,IAAI,CAAC;AACpD,aAAa;AACb,YAAY,EAAE,CAAC,OAAO,GAAG,cAAc,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,MAAM,KAAK,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC/G,YAAY,EAAE,CAAC,CAAC;AAChB,QAAQ,OAAO,UAAU,CAAC;AAC1B,KAAK;AACL,CAAC;AACD,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC;AAC1B,OAAO,CAAC,OAAO,GAAG,EAAE,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/core/package.json b/node_modules/@octokit/core/package.json index 6c826a7e..cb354598 100644 --- a/node_modules/@octokit/core/package.json +++ b/node_modules/@octokit/core/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/core", "description": "Extendable client for GitHub's REST & GraphQL APIs", - "version": "3.4.0", + "version": "3.5.1", "license": "MIT", "files": [ "dist-*/", @@ -20,7 +20,7 @@ "dependencies": { "@octokit/auth-token": "^2.4.4", "@octokit/graphql": "^4.5.8", - "@octokit/request": "^5.4.12", + "@octokit/request": "^5.6.0", "@octokit/request-error": "^2.0.5", "@octokit/types": "^6.0.3", "before-after-hook": "^2.2.0", @@ -39,13 +39,13 @@ "@types/node-fetch": "^2.5.0", "fetch-mock": "^9.0.0", "http-proxy-agent": "^4.0.1", - "jest": "^26.1.0", + "jest": "^27.0.0", "lolex": "^6.0.0", - "prettier": "^2.0.4", + "prettier": "2.3.1", "proxy": "^1.0.1", "semantic-release": "^17.0.0", "semantic-release-plugin-update-version-in-files": "^1.0.0", - "ts-jest": "^26.1.3", + "ts-jest": "^27.0.0", "typescript": "^4.0.2" }, "publishConfig": { diff --git a/node_modules/@octokit/endpoint/dist-node/index.js b/node_modules/@octokit/endpoint/dist-node/index.js index 4074d68c..70f24ff7 100644 --- a/node_modules/@octokit/endpoint/dist-node/index.js +++ b/node_modules/@octokit/endpoint/dist-node/index.js @@ -366,7 +366,7 @@ function withDefaults(oldDefaults, newDefaults) { }); } -const VERSION = "6.0.11"; +const VERSION = "6.0.12"; const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. // So we use RequestParameters and add method as additional required property. 
diff --git a/node_modules/@octokit/endpoint/dist-node/index.js.map b/node_modules/@octokit/endpoint/dist-node/index.js.map index 0783c99d..003e4f27 100644 --- a/node_modules/@octokit/endpoint/dist-node/index.js.map +++ b/node_modules/@octokit/endpoint/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/util/lowercase-keys.js","../dist-src/util/merge-deep.js","../dist-src/util/remove-undefined-properties.js","../dist-src/merge.js","../dist-src/util/add-query-parameters.js","../dist-src/util/extract-url-variable-names.js","../dist-src/util/omit.js","../dist-src/util/url-template.js","../dist-src/parse.js","../dist-src/endpoint-with-defaults.js","../dist-src/with-defaults.js","../dist-src/version.js","../dist-src/defaults.js","../dist-src/index.js"],"sourcesContent":["export function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n","import { isPlainObject } from \"is-plain-object\";\nexport function mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n }\n else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n","export function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n","import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nexport function merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n }\n else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews\n .filter((preview) => !mergedOptions.mediaType.previews.includes(preview))\n .concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n","export function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return (url +\n separator +\n names\n .map((name) => {\n if (name === \"q\") {\n return (\"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\"));\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n })\n .join(\"&\"));\n}\n","const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nexport function extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n","export function omit(object, keysToOmit) {\n return Object.keys(object)\n .filter((option) => !keysToOmit.includes(option))\n .reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n","// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str\n .split(/(%[0-9A-Fa-f]{2})/g)\n .map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n })\n .join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value =\n operator === \"+\" || operator === \"#\"\n ? 
encodeReserved(value)\n : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n }\n else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n }\n else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n }\n else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n }\n else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n }\n else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n }\n else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n }\n else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nexport function parseUrl(template) {\n return {\n expand: expand.bind(null, template),\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n }\n else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n }\n else {\n return values.join(\",\");\n }\n }\n else {\n return encodeReserved(literal);\n }\n });\n}\n","import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nexport function parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\",\n ]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options)\n .filter((option) => urlVariableNames.includes(option))\n .concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept\n .split(/,/)\n .map((preview) => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`))\n .join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader\n .concat(options.mediaType.previews)\n .map((preview) => {\n const format = options.mediaType.format\n ? `.${options.mediaType.format}`\n : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n })\n .join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n }\n else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n }\n else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n else {\n headers[\"content-length\"] = 0;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({ method, url, headers }, typeof body !== \"undefined\" ? { body } : null, options.request ? 
{ request: options.request } : null);\n}\n","import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n","import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse,\n });\n}\n","export const VERSION = \"6.0.11\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nexport const DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent,\n },\n mediaType: {\n format: \"\",\n previews: [],\n },\n};\n","import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nexport const endpoint = withDefaults(null, DEFAULTS);\n"],"names":["lowercaseKeys","object","Object","keys","reduce","newObj","key","toLowerCase","mergeDeep","defaults","options","result","assign","forEach","isPlainObject","removeUndefinedProperties","obj","undefined","merge","route","method","url","split","headers","mergedOptions","mediaType","previews","length","filter","preview","includes","concat","map","replace","addQueryParameters","parameters","separator","test","names","name","q","encodeURIComponent","join","urlVariableRegex","removeNonChars","variableName","extractUrlVariableNames","matches","match","a","b","omit","keysToOmit","option","encodeReserved","str","part","encodeURI","encodeUnreserved","c","charCodeAt","toString","toUpperCase","encodeValue","operator","value","isDefined","isKeyOperator","getValues","context","modifier","substring","parseInt","push","Array","isArray","k","tmp","parseUrl","template","expand","bind","operators","_","expression","literal","values","indexOf","charAt","substr","variable","exec","parse","body","urlVariableNames","baseUrl","omittedParameters","remainingParameters","isBinaryRequest","accept","format","previewsFromAcceptHeader","data","request","endpointWithDefaults","withDefaults","oldDefaults","newDefaults","DEFAULTS","endpoint","VERSION","userAgent","getUserAgent"],"mappings":";;;;;;;AAAO,SAASA,aAAT,CAAuBC,MAAvB,EAA+B;AAClC,MAAI,CAACA,MAAL,EAAa;AACT,WAAO,EAAP;AACH;;AACD,SAAOC,MAAM,CAACC,IAAP,CAAYF,MAAZ,EAAoBG,MAApB,CAA2B,CAACC,MAAD,EAASC,GAAT,KAAiB;AAC/CD,IAAAA,MAAM,CAACC,GAAG,CAACC,WAAJ,EAAD,CAAN,GAA4BN,MAAM,CAACK,GAAD,CAAlC;AACA,WAAOD,MAAP;AACH,GAHM,EAGJ,EAHI,CAAP;AAIH;;ACPM,SAASG,SAAT,CAAmBC,QAAnB,EAA6BC,OAA7B,EAAsC;AACzC,QAAMC,MAAM,GAAGT,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBH,QAAlB,CAAf;AACAP,EAAAA,MAAM,CAACC,IAAP,CAAYO,OAAZ,EAAqBG,OAArB,CAA8BP,GAAD,IAAS;AAClC,QAAIQ,2BAAa,CAACJ,OAAO,CAACJ,GAAD,CAAR,CAAjB,EAAiC;AAC7B,UAAI,EAAEA,GAAG,IAAIG,QAAT,CAAJ,EACIP,MAAM,CAACU,MAAP,CAAcD,MAAd,EAAsB;AAAE,SAACL,GAAD,GAAOI,OAAO,CAACJ,GAAD;AAAhB,OAAtB,EADJ,KAGIK,MAAM,CAACL,GAAD,CAAN,GAAcE,SAAS,CAACC,QAAQ,CAACH,GAAD,CAAT,EAAgBI,OAAO,CAACJ,GAAD,CAAvB,CAAvB;AACP,KALD,MAMK;AACDJ,MAAAA,MAAM,CAACU,MAAP,CA
AcD,MAAd,EAAsB;AAAE,SAACL,GAAD,GAAOI,OAAO,CAACJ,GAAD;AAAhB,OAAtB;AACH;AACJ,GAVD;AAWA,SAAOK,MAAP;AACH;;ACfM,SAASI,yBAAT,CAAmCC,GAAnC,EAAwC;AAC3C,OAAK,MAAMV,GAAX,IAAkBU,GAAlB,EAAuB;AACnB,QAAIA,GAAG,CAACV,GAAD,CAAH,KAAaW,SAAjB,EAA4B;AACxB,aAAOD,GAAG,CAACV,GAAD,CAAV;AACH;AACJ;;AACD,SAAOU,GAAP;AACH;;ACJM,SAASE,KAAT,CAAeT,QAAf,EAAyBU,KAAzB,EAAgCT,OAAhC,EAAyC;AAC5C,MAAI,OAAOS,KAAP,KAAiB,QAArB,EAA+B;AAC3B,QAAI,CAACC,MAAD,EAASC,GAAT,IAAgBF,KAAK,CAACG,KAAN,CAAY,GAAZ,CAApB;AACAZ,IAAAA,OAAO,GAAGR,MAAM,CAACU,MAAP,CAAcS,GAAG,GAAG;AAAED,MAAAA,MAAF;AAAUC,MAAAA;AAAV,KAAH,GAAqB;AAAEA,MAAAA,GAAG,EAAED;AAAP,KAAtC,EAAuDV,OAAvD,CAAV;AACH,GAHD,MAIK;AACDA,IAAAA,OAAO,GAAGR,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBO,KAAlB,CAAV;AACH,GAP2C;;;AAS5CT,EAAAA,OAAO,CAACa,OAAR,GAAkBvB,aAAa,CAACU,OAAO,CAACa,OAAT,CAA/B,CAT4C;;AAW5CR,EAAAA,yBAAyB,CAACL,OAAD,CAAzB;AACAK,EAAAA,yBAAyB,CAACL,OAAO,CAACa,OAAT,CAAzB;AACA,QAAMC,aAAa,GAAGhB,SAAS,CAACC,QAAQ,IAAI,EAAb,EAAiBC,OAAjB,CAA/B,CAb4C;;AAe5C,MAAID,QAAQ,IAAIA,QAAQ,CAACgB,SAAT,CAAmBC,QAAnB,CAA4BC,MAA5C,EAAoD;AAChDH,IAAAA,aAAa,CAACC,SAAd,CAAwBC,QAAxB,GAAmCjB,QAAQ,CAACgB,SAAT,CAAmBC,QAAnB,CAC9BE,MAD8B,CACtBC,OAAD,IAAa,CAACL,aAAa,CAACC,SAAd,CAAwBC,QAAxB,CAAiCI,QAAjC,CAA0CD,OAA1C,CADS,EAE9BE,MAF8B,CAEvBP,aAAa,CAACC,SAAd,CAAwBC,QAFD,CAAnC;AAGH;;AACDF,EAAAA,aAAa,CAACC,SAAd,CAAwBC,QAAxB,GAAmCF,aAAa,CAACC,SAAd,CAAwBC,QAAxB,CAAiCM,GAAjC,CAAsCH,OAAD,IAAaA,OAAO,CAACI,OAAR,CAAgB,UAAhB,EAA4B,EAA5B,CAAlD,CAAnC;AACA,SAAOT,aAAP;AACH;;ACzBM,SAASU,kBAAT,CAA4Bb,GAA5B,EAAiCc,UAAjC,EAA6C;AAChD,QAAMC,SAAS,GAAG,KAAKC,IAAL,CAAUhB,GAAV,IAAiB,GAAjB,GAAuB,GAAzC;AACA,QAAMiB,KAAK,GAAGpC,MAAM,CAACC,IAAP,CAAYgC,UAAZ,CAAd;;AACA,MAAIG,KAAK,CAACX,MAAN,KAAiB,CAArB,EAAwB;AACpB,WAAON,GAAP;AACH;;AACD,SAAQA,GAAG,GACPe,SADI,GAEJE,KAAK,CACAN,GADL,CACUO,IAAD,IAAU;AACf,QAAIA,IAAI,KAAK,GAAb,EAAkB;AACd,aAAQ,OAAOJ,UAAU,CAACK,CAAX,CAAalB,KAAb,CAAmB,GAAnB,EAAwBU,GAAxB,CAA4BS,kBAA5B,EAAgDC,IAAhD,CAAqD,GAArD,CAAf;AACH;;AACD,WAAQ,GAAEH,IAAK,IAAGE,kBAAkB,CAACN,UAAU,CAACI,IAAD,CAAX,CAAmB,EAAvD;AACH,GAND,EAOKG,IAPL,CAOU,GAPV,CAFJ;AAUH;;AChBD,MAAMC,gBAAgB,GAAG,YAAzB;;AACA,SAASC,cAAT,CAAwBC,YAAxB,EAAsC;AAClC,SAAOA,YAAY,CAACZ,OAAb,CAAqB,YAArB,EAAmC,EAAnC,EAAuCX,KAAvC,CAA6C,GAA7C,CAAP;AACH;;AACD,AAAO,SAASwB,uBAAT,CAAiCzB,GAAjC,EAAsC;AACzC,QAAM0B,OAAO,GAAG1B,GAAG,CAAC2B,KAAJ,CAAUL,gBAAV,CAAhB;;AACA,MAAI,CAACI,OAAL,EAAc;AACV,WAAO,EAAP;AACH;;AACD,SAAOA,OAAO,CAACf,GAAR,CAAYY,cAAZ,EAA4BxC,MAA5B,CAAmC,CAAC6C,CAAD,EAAIC,CAAJ,KAAUD,CAAC,CAAClB,MAAF,CAASmB,CAAT,CAA7C,EAA0D,EAA1D,CAAP;AACH;;ACVM,SAASC,IAAT,CAAclD,MAAd,EAAsBmD,UAAtB,EAAkC;AACrC,SAAOlD,MAAM,CAACC,IAAP,CAAYF,MAAZ,EACF2B,MADE,CACMyB,MAAD,IAAY,CAACD,UAAU,CAACtB,QAAX,CAAoBuB,MAApB,CADlB,EAEFjD,MAFE,CAEK,CAACY,GAAD,EAAMV,GAAN,KAAc;AACtBU,IAAAA,GAAG,CAACV,GAAD,CAAH,GAAWL,MAAM,CAACK,GAAD,CAAjB;AACA,WAAOU,GAAP;AACH,GALM,EAKJ,EALI,CAAP;AAMH;;ACPD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AACA;AACA,SAASsC,cAAT,CAAwBC,GAAxB,EAA6B;AACzB,SAAOA,GAAG,CACLjC,KADE,CACI,oBADJ,EAEFU,GAFE,CAEE,UAAUwB,IAAV,EAAgB;AACrB,QAAI,CAAC,eAAenB,IAAf,CAAoBmB,IAApB,CAAL,EAAgC;AAC5BA,MAAAA,IAAI,GAAGC,SAAS,CAACD,IAAD,CAAT,CAAgBvB,OAAhB,CAAwB,MAAxB,EAAgC,GAAhC,EAAqCA,OAArC,CAA6C,MAA7C,EAAqD,GAArD,CAAP;AACH;;AACD,WAAOuB,IAAP;AACH,GAPM,EAQFd,IARE,CAQG,EARH,CAAP;AASH;;AACD,SAASgB,gBAAT,CAA0BH,GAA1B,EAA+B;AAC3B,SAAOd,kBAAkB,CAACc,GAAD,CAAlB,CAAwBtB,OAAxB,CAAgC,UAAhC,EAA4C,UAAU0B,CAAV,EAAa;AAC5D,WAAO,MAAMA,CAAC,CAACC,UAAF,CAAa,CAAb,EAAgBC,QAAhB,CAAyB,EAAzB,EAA6BC,WAA7B,EAAb;AACH,GAFM,CAAP;AAGH;;AACD,SAASC,WAAT,CAAqBC,QAArB,EAA+BC,KAA/B,EAAsC3D,GAAtC,EAA2C;A
ACvC2D,EAAAA,KAAK,GACDD,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAjC,GACMV,cAAc,CAACW,KAAD,CADpB,GAEMP,gBAAgB,CAACO,KAAD,CAH1B;;AAIA,MAAI3D,GAAJ,EAAS;AACL,WAAOoD,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAAxB,GAA8B2D,KAArC;AACH,GAFD,MAGK;AACD,WAAOA,KAAP;AACH;AACJ;;AACD,SAASC,SAAT,CAAmBD,KAAnB,EAA0B;AACtB,SAAOA,KAAK,KAAKhD,SAAV,IAAuBgD,KAAK,KAAK,IAAxC;AACH;;AACD,SAASE,aAAT,CAAuBH,QAAvB,EAAiC;AAC7B,SAAOA,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAjC,IAAwCA,QAAQ,KAAK,GAA5D;AACH;;AACD,SAASI,SAAT,CAAmBC,OAAnB,EAA4BL,QAA5B,EAAsC1D,GAAtC,EAA2CgE,QAA3C,EAAqD;AACjD,MAAIL,KAAK,GAAGI,OAAO,CAAC/D,GAAD,CAAnB;AAAA,MAA0BK,MAAM,GAAG,EAAnC;;AACA,MAAIuD,SAAS,CAACD,KAAD,CAAT,IAAoBA,KAAK,KAAK,EAAlC,EAAsC;AAClC,QAAI,OAAOA,KAAP,KAAiB,QAAjB,IACA,OAAOA,KAAP,KAAiB,QADjB,IAEA,OAAOA,KAAP,KAAiB,SAFrB,EAEgC;AAC5BA,MAAAA,KAAK,GAAGA,KAAK,CAACJ,QAAN,EAAR;;AACA,UAAIS,QAAQ,IAAIA,QAAQ,KAAK,GAA7B,EAAkC;AAC9BL,QAAAA,KAAK,GAAGA,KAAK,CAACM,SAAN,CAAgB,CAAhB,EAAmBC,QAAQ,CAACF,QAAD,EAAW,EAAX,CAA3B,CAAR;AACH;;AACD3D,MAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAX,EAAkBE,aAAa,CAACH,QAAD,CAAb,GAA0B1D,GAA1B,GAAgC,EAAlD,CAAvB;AACH,KARD,MASK;AACD,UAAIgE,QAAQ,KAAK,GAAjB,EAAsB;AAClB,YAAII,KAAK,CAACC,OAAN,CAAcV,KAAd,CAAJ,EAA0B;AACtBA,UAAAA,KAAK,CAACrC,MAAN,CAAasC,SAAb,EAAwBrD,OAAxB,CAAgC,UAAUoD,KAAV,EAAiB;AAC7CtD,YAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAX,EAAkBE,aAAa,CAACH,QAAD,CAAb,GAA0B1D,GAA1B,GAAgC,EAAlD,CAAvB;AACH,WAFD;AAGH,SAJD,MAKK;AACDJ,UAAAA,MAAM,CAACC,IAAP,CAAY8D,KAAZ,EAAmBpD,OAAnB,CAA2B,UAAU+D,CAAV,EAAa;AACpC,gBAAIV,SAAS,CAACD,KAAK,CAACW,CAAD,CAAN,CAAb,EAAyB;AACrBjE,cAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAK,CAACW,CAAD,CAAhB,EAAqBA,CAArB,CAAvB;AACH;AACJ,WAJD;AAKH;AACJ,OAbD,MAcK;AACD,cAAMC,GAAG,GAAG,EAAZ;;AACA,YAAIH,KAAK,CAACC,OAAN,CAAcV,KAAd,CAAJ,EAA0B;AACtBA,UAAAA,KAAK,CAACrC,MAAN,CAAasC,SAAb,EAAwBrD,OAAxB,CAAgC,UAAUoD,KAAV,EAAiB;AAC7CY,YAAAA,GAAG,CAACJ,IAAJ,CAASV,WAAW,CAACC,QAAD,EAAWC,KAAX,CAApB;AACH,WAFD;AAGH,SAJD,MAKK;AACD/D,UAAAA,MAAM,CAACC,IAAP,CAAY8D,KAAZ,EAAmBpD,OAAnB,CAA2B,UAAU+D,CAAV,EAAa;AACpC,gBAAIV,SAAS,CAACD,KAAK,CAACW,CAAD,CAAN,CAAb,EAAyB;AACrBC,cAAAA,GAAG,CAACJ,IAAJ,CAASf,gBAAgB,CAACkB,CAAD,CAAzB;AACAC,cAAAA,GAAG,CAACJ,IAAJ,CAASV,WAAW,CAACC,QAAD,EAAWC,KAAK,CAACW,CAAD,CAAL,CAASf,QAAT,EAAX,CAApB;AACH;AACJ,WALD;AAMH;;AACD,YAAIM,aAAa,CAACH,QAAD,CAAjB,EAA6B;AACzBrD,UAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAAxB,GAA8BuE,GAAG,CAACnC,IAAJ,CAAS,GAAT,CAA1C;AACH,SAFD,MAGK,IAAImC,GAAG,CAAClD,MAAJ,KAAe,CAAnB,EAAsB;AACvBhB,UAAAA,MAAM,CAAC8D,IAAP,CAAYI,GAAG,CAACnC,IAAJ,CAAS,GAAT,CAAZ;AACH;AACJ;AACJ;AACJ,GAhDD,MAiDK;AACD,QAAIsB,QAAQ,KAAK,GAAjB,EAAsB;AAClB,UAAIE,SAAS,CAACD,KAAD,CAAb,EAAsB;AAClBtD,QAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAA5B;AACH;AACJ,KAJD,MAKK,IAAI2D,KAAK,KAAK,EAAV,KAAiBD,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAlD,CAAJ,EAA4D;AAC7DrD,MAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAApC;AACH,KAFI,MAGA,IAAI2D,KAAK,KAAK,EAAd,EAAkB;AACnBtD,MAAAA,MAAM,CAAC8D,IAAP,CAAY,EAAZ;AACH;AACJ;;AACD,SAAO9D,MAAP;AACH;;AACD,AAAO,SAASmE,QAAT,CAAkBC,QAAlB,EAA4B;AAC/B,SAAO;AACHC,IAAAA,MAAM,EAAEA,MAAM,CAACC,IAAP,CAAY,IAAZ,EAAkBF,QAAlB;AADL,GAAP;AAGH;;AACD,SAASC,MAAT,CAAgBD,QAAhB,EAA0BV,OAA1B,EAAmC;AAC/B,MAAIa,SAAS,GAAG,CAAC,GAAD,EAAM,GAAN,EAAW,GAAX,EAAgB,GAAhB,EAAqB,GAArB,EAA0B,GAA1B,EAA+B,GAA/B,CAAhB;AACA,SAAOH,QAAQ,CAAC9C,OAAT,CAAiB,4BAAjB,EAA+C,UAAUkD,CAAV,EAAaC,UAAb,EAAyBC,OAAzB,EAAkC;AACpF,QAAID,UAAJ,EAAgB;AACZ,UAAIpB,QAAQ,GAAG,EAAf;AACA,YAAMsB,MAAM,GAAG,EAAf;;AACA,UAAIJ,SAAS,CAACK,OAAV,CAAkBH,UAAU,CAACI,MAAX,CAAkB,CAAlB,CAAlB,MAA4C,CAAC,CAAjD,EAAoD;AAChDxB,QAAAA,QAAQ,GAAGoB,UAAU,CAACI,M
AAX,CAAkB,CAAlB,CAAX;AACAJ,QAAAA,UAAU,GAAGA,UAAU,CAACK,MAAX,CAAkB,CAAlB,CAAb;AACH;;AACDL,MAAAA,UAAU,CAAC9D,KAAX,CAAiB,IAAjB,EAAuBT,OAAvB,CAA+B,UAAU6E,QAAV,EAAoB;AAC/C,YAAIb,GAAG,GAAG,4BAA4Bc,IAA5B,CAAiCD,QAAjC,CAAV;AACAJ,QAAAA,MAAM,CAACb,IAAP,CAAYL,SAAS,CAACC,OAAD,EAAUL,QAAV,EAAoBa,GAAG,CAAC,CAAD,CAAvB,EAA4BA,GAAG,CAAC,CAAD,CAAH,IAAUA,GAAG,CAAC,CAAD,CAAzC,CAArB;AACH,OAHD;;AAIA,UAAIb,QAAQ,IAAIA,QAAQ,KAAK,GAA7B,EAAkC;AAC9B,YAAI5B,SAAS,GAAG,GAAhB;;AACA,YAAI4B,QAAQ,KAAK,GAAjB,EAAsB;AAClB5B,UAAAA,SAAS,GAAG,GAAZ;AACH,SAFD,MAGK,IAAI4B,QAAQ,KAAK,GAAjB,EAAsB;AACvB5B,UAAAA,SAAS,GAAG4B,QAAZ;AACH;;AACD,eAAO,CAACsB,MAAM,CAAC3D,MAAP,KAAkB,CAAlB,GAAsBqC,QAAtB,GAAiC,EAAlC,IAAwCsB,MAAM,CAAC5C,IAAP,CAAYN,SAAZ,CAA/C;AACH,OATD,MAUK;AACD,eAAOkD,MAAM,CAAC5C,IAAP,CAAY,GAAZ,CAAP;AACH;AACJ,KAxBD,MAyBK;AACD,aAAOY,cAAc,CAAC+B,OAAD,CAArB;AACH;AACJ,GA7BM,CAAP;AA8BH;;AC/JM,SAASO,KAAT,CAAelF,OAAf,EAAwB;AAC3B;AACA,MAAIU,MAAM,GAAGV,OAAO,CAACU,MAAR,CAAe0C,WAAf,EAAb,CAF2B;;AAI3B,MAAIzC,GAAG,GAAG,CAACX,OAAO,CAACW,GAAR,IAAe,GAAhB,EAAqBY,OAArB,CAA6B,cAA7B,EAA6C,MAA7C,CAAV;AACA,MAAIV,OAAO,GAAGrB,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBF,OAAO,CAACa,OAA1B,CAAd;AACA,MAAIsE,IAAJ;AACA,MAAI1D,UAAU,GAAGgB,IAAI,CAACzC,OAAD,EAAU,CAC3B,QAD2B,EAE3B,SAF2B,EAG3B,KAH2B,EAI3B,SAJ2B,EAK3B,SAL2B,EAM3B,WAN2B,CAAV,CAArB,CAP2B;;AAgB3B,QAAMoF,gBAAgB,GAAGhD,uBAAuB,CAACzB,GAAD,CAAhD;AACAA,EAAAA,GAAG,GAAGyD,QAAQ,CAACzD,GAAD,CAAR,CAAc2D,MAAd,CAAqB7C,UAArB,CAAN;;AACA,MAAI,CAAC,QAAQE,IAAR,CAAahB,GAAb,CAAL,EAAwB;AACpBA,IAAAA,GAAG,GAAGX,OAAO,CAACqF,OAAR,GAAkB1E,GAAxB;AACH;;AACD,QAAM2E,iBAAiB,GAAG9F,MAAM,CAACC,IAAP,CAAYO,OAAZ,EACrBkB,MADqB,CACbyB,MAAD,IAAYyC,gBAAgB,CAAChE,QAAjB,CAA0BuB,MAA1B,CADE,EAErBtB,MAFqB,CAEd,SAFc,CAA1B;AAGA,QAAMkE,mBAAmB,GAAG9C,IAAI,CAAChB,UAAD,EAAa6D,iBAAb,CAAhC;AACA,QAAME,eAAe,GAAG,6BAA6B7D,IAA7B,CAAkCd,OAAO,CAAC4E,MAA1C,CAAxB;;AACA,MAAI,CAACD,eAAL,EAAsB;AAClB,QAAIxF,OAAO,CAACe,SAAR,CAAkB2E,MAAtB,EAA8B;AAC1B;AACA7E,MAAAA,OAAO,CAAC4E,MAAR,GAAiB5E,OAAO,CAAC4E,MAAR,CACZ7E,KADY,CACN,GADM,EAEZU,GAFY,CAEPH,OAAD,IAAaA,OAAO,CAACI,OAAR,CAAgB,kDAAhB,EAAqE,uBAAsBvB,OAAO,CAACe,SAAR,CAAkB2E,MAAO,EAApH,CAFL,EAGZ1D,IAHY,CAGP,GAHO,CAAjB;AAIH;;AACD,QAAIhC,OAAO,CAACe,SAAR,CAAkBC,QAAlB,CAA2BC,MAA/B,EAAuC;AACnC,YAAM0E,wBAAwB,GAAG9E,OAAO,CAAC4E,MAAR,CAAenD,KAAf,CAAqB,qBAArB,KAA+C,EAAhF;AACAzB,MAAAA,OAAO,CAAC4E,MAAR,GAAiBE,wBAAwB,CACpCtE,MADY,CACLrB,OAAO,CAACe,SAAR,CAAkBC,QADb,EAEZM,GAFY,CAEPH,OAAD,IAAa;AAClB,cAAMuE,MAAM,GAAG1F,OAAO,CAACe,SAAR,CAAkB2E,MAAlB,GACR,IAAG1F,OAAO,CAACe,SAAR,CAAkB2E,MAAO,EADpB,GAET,OAFN;AAGA,eAAQ,0BAAyBvE,OAAQ,WAAUuE,MAAO,EAA1D;AACH,OAPgB,EAQZ1D,IARY,CAQP,GARO,CAAjB;AASH;AACJ,GA9C0B;AAgD3B;;;AACA,MAAI,CAAC,KAAD,EAAQ,MAAR,EAAgBZ,QAAhB,CAAyBV,MAAzB,CAAJ,EAAsC;AAClCC,IAAAA,GAAG,GAAGa,kBAAkB,CAACb,GAAD,EAAM4E,mBAAN,CAAxB;AACH,GAFD,MAGK;AACD,QAAI,UAAUA,mBAAd,EAAmC;AAC/BJ,MAAAA,IAAI,GAAGI,mBAAmB,CAACK,IAA3B;AACH,KAFD,MAGK;AACD,UAAIpG,MAAM,CAACC,IAAP,CAAY8F,mBAAZ,EAAiCtE,MAArC,EAA6C;AACzCkE,QAAAA,IAAI,GAAGI,mBAAP;AACH,OAFD,MAGK;AACD1E,QAAAA,OAAO,CAAC,gBAAD,CAAP,GAA4B,CAA5B;AACH;AACJ;AACJ,GAhE0B;;;AAkE3B,MAAI,CAACA,OAAO,CAAC,cAAD,CAAR,IAA4B,OAAOsE,IAAP,KAAgB,WAAhD,EAA6D;AACzDtE,IAAAA,OAAO,CAAC,cAAD,CAAP,GAA0B,iCAA1B;AACH,GApE0B;AAsE3B;;;AACA,MAAI,CAAC,OAAD,EAAU,KAAV,EAAiBO,QAAjB,CAA0BV,MAA1B,KAAqC,OAAOyE,IAAP,KAAgB,WAAzD,EAAsE;AAClEA,IAAAA,IAAI,GAAG,EAAP;AACH,GAzE0B;;;AA2E3B,SAAO3F,MAAM,CAACU,MAAP,CAAc;AAAEQ,IAAAA,MAAF;AAAUC,IAAAA,GAAV;AAAeE,IAAAA;AAAf,GAAd,EAAwC,OAAOsE,IAAP,KAAgB,WAAhB,GAA8B;AAAEA,IAAAA;AAAF,GAA9B,GAAyC,IAAjF,EAAuFnF,OAAO,CAAC6F,OAAR,GAAkB;AAAEA,IAAAA,OAAO,EAAE7F,OAAO,CAAC6F;AAAnB,GAAlB,GAAiD,IAAxI,CAAP;AACH;;AC9EM,SAASC,oBAAT
,CAA8B/F,QAA9B,EAAwCU,KAAxC,EAA+CT,OAA/C,EAAwD;AAC3D,SAAOkF,KAAK,CAAC1E,KAAK,CAACT,QAAD,EAAWU,KAAX,EAAkBT,OAAlB,CAAN,CAAZ;AACH;;ACDM,SAAS+F,YAAT,CAAsBC,WAAtB,EAAmCC,WAAnC,EAAgD;AACnD,QAAMC,QAAQ,GAAG1F,KAAK,CAACwF,WAAD,EAAcC,WAAd,CAAtB;AACA,QAAME,QAAQ,GAAGL,oBAAoB,CAACvB,IAArB,CAA0B,IAA1B,EAAgC2B,QAAhC,CAAjB;AACA,SAAO1G,MAAM,CAACU,MAAP,CAAciG,QAAd,EAAwB;AAC3BD,IAAAA,QAD2B;AAE3BnG,IAAAA,QAAQ,EAAEgG,YAAY,CAACxB,IAAb,CAAkB,IAAlB,EAAwB2B,QAAxB,CAFiB;AAG3B1F,IAAAA,KAAK,EAAEA,KAAK,CAAC+D,IAAN,CAAW,IAAX,EAAiB2B,QAAjB,CAHoB;AAI3BhB,IAAAA;AAJ2B,GAAxB,CAAP;AAMH;;ACZM,MAAMkB,OAAO,GAAG,mBAAhB;;ACEP,MAAMC,SAAS,GAAI,uBAAsBD,OAAQ,IAAGE,+BAAY,EAAG,EAAnE;AAEA;;AACA,AAAO,MAAMJ,QAAQ,GAAG;AACpBxF,EAAAA,MAAM,EAAE,KADY;AAEpB2E,EAAAA,OAAO,EAAE,wBAFW;AAGpBxE,EAAAA,OAAO,EAAE;AACL4E,IAAAA,MAAM,EAAE,gCADH;AAEL,kBAAcY;AAFT,GAHW;AAOpBtF,EAAAA,SAAS,EAAE;AACP2E,IAAAA,MAAM,EAAE,EADD;AAEP1E,IAAAA,QAAQ,EAAE;AAFH;AAPS,CAAjB;;MCHMmF,QAAQ,GAAGJ,YAAY,CAAC,IAAD,EAAOG,QAAP,CAA7B;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/util/lowercase-keys.js","../dist-src/util/merge-deep.js","../dist-src/util/remove-undefined-properties.js","../dist-src/merge.js","../dist-src/util/add-query-parameters.js","../dist-src/util/extract-url-variable-names.js","../dist-src/util/omit.js","../dist-src/util/url-template.js","../dist-src/parse.js","../dist-src/endpoint-with-defaults.js","../dist-src/with-defaults.js","../dist-src/version.js","../dist-src/defaults.js","../dist-src/index.js"],"sourcesContent":["export function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n","import { isPlainObject } from \"is-plain-object\";\nexport function mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n }\n else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n","export function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n","import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nexport function merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? 
{ method, url } : { url: method }, options);\n }\n else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews\n .filter((preview) => !mergedOptions.mediaType.previews.includes(preview))\n .concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n","export function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return (url +\n separator +\n names\n .map((name) => {\n if (name === \"q\") {\n return (\"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\"));\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n })\n .join(\"&\"));\n}\n","const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nexport function extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n","export function omit(object, keysToOmit) {\n return Object.keys(object)\n .filter((option) => !keysToOmit.includes(option))\n .reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n","// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str\n .split(/(%[0-9A-Fa-f]{2})/g)\n .map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n })\n .join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value =\n operator === \"+\" || operator === \"#\"\n ? encodeReserved(value)\n : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n }\n else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n }\n else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : \"\"));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n }\n else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n }\n else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n }\n else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n }\n else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n }\n else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nexport function parseUrl(template) {\n return {\n expand: expand.bind(null, template),\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n }\n else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n }\n else {\n return values.join(\",\");\n }\n }\n else {\n return encodeReserved(literal);\n }\n });\n}\n","import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nexport function parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\",\n ]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options)\n .filter((option) => urlVariableNames.includes(option))\n .concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. 
application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept\n .split(/,/)\n .map((preview) => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`))\n .join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader\n .concat(options.mediaType.previews)\n .map((preview) => {\n const format = options.mediaType.format\n ? `.${options.mediaType.format}`\n : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n })\n .join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n }\n else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n }\n else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n else {\n headers[\"content-length\"] = 0;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({ method, url, headers }, typeof body !== \"undefined\" ? { body } : null, options.request ? 
{ request: options.request } : null);\n}\n","import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n","import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse,\n });\n}\n","export const VERSION = \"6.0.12\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nexport const DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent,\n },\n mediaType: {\n format: \"\",\n previews: [],\n },\n};\n","import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nexport const endpoint = withDefaults(null, DEFAULTS);\n"],"names":["lowercaseKeys","object","Object","keys","reduce","newObj","key","toLowerCase","mergeDeep","defaults","options","result","assign","forEach","isPlainObject","removeUndefinedProperties","obj","undefined","merge","route","method","url","split","headers","mergedOptions","mediaType","previews","length","filter","preview","includes","concat","map","replace","addQueryParameters","parameters","separator","test","names","name","q","encodeURIComponent","join","urlVariableRegex","removeNonChars","variableName","extractUrlVariableNames","matches","match","a","b","omit","keysToOmit","option","encodeReserved","str","part","encodeURI","encodeUnreserved","c","charCodeAt","toString","toUpperCase","encodeValue","operator","value","isDefined","isKeyOperator","getValues","context","modifier","substring","parseInt","push","Array","isArray","k","tmp","parseUrl","template","expand","bind","operators","_","expression","literal","values","indexOf","charAt","substr","variable","exec","parse","body","urlVariableNames","baseUrl","omittedParameters","remainingParameters","isBinaryRequest","accept","format","previewsFromAcceptHeader","data","request","endpointWithDefaults","withDefaults","oldDefaults","newDefaults","DEFAULTS","endpoint","VERSION","userAgent","getUserAgent"],"mappings":";;;;;;;AAAO,SAASA,aAAT,CAAuBC,MAAvB,EAA+B;AAClC,MAAI,CAACA,MAAL,EAAa;AACT,WAAO,EAAP;AACH;;AACD,SAAOC,MAAM,CAACC,IAAP,CAAYF,MAAZ,EAAoBG,MAApB,CAA2B,CAACC,MAAD,EAASC,GAAT,KAAiB;AAC/CD,IAAAA,MAAM,CAACC,GAAG,CAACC,WAAJ,EAAD,CAAN,GAA4BN,MAAM,CAACK,GAAD,CAAlC;AACA,WAAOD,MAAP;AACH,GAHM,EAGJ,EAHI,CAAP;AAIH;;ACPM,SAASG,SAAT,CAAmBC,QAAnB,EAA6BC,OAA7B,EAAsC;AACzC,QAAMC,MAAM,GAAGT,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBH,QAAlB,CAAf;AACAP,EAAAA,MAAM,CAACC,IAAP,CAAYO,OAAZ,EAAqBG,OAArB,CAA8BP,GAAD,IAAS;AAClC,QAAIQ,2BAAa,CAACJ,OAAO,CAACJ,GAAD,CAAR,CAAjB,EAAiC;AAC7B,UAAI,EAAEA,GAAG,IAAIG,QAAT,CAAJ,EACIP,MAAM,CAACU,MAAP,CAAcD,MAAd,EAAsB;AAAE,SAACL,GAAD,GAAOI,OAAO,CAACJ,GAAD;AAAhB,OAAtB,EADJ,KAGIK,MAAM,CAACL,GAAD,CAAN,GAAcE,SAAS,CAACC,QAAQ,CAACH,GAAD,CAAT,EAAgBI,OAAO,CAACJ,GAAD,CAAvB,CAAvB;AACP,KALD,MAMK;AACDJ,MAAAA,MAAM,CAACU,MAAP,CA
AcD,MAAd,EAAsB;AAAE,SAACL,GAAD,GAAOI,OAAO,CAACJ,GAAD;AAAhB,OAAtB;AACH;AACJ,GAVD;AAWA,SAAOK,MAAP;AACH;;ACfM,SAASI,yBAAT,CAAmCC,GAAnC,EAAwC;AAC3C,OAAK,MAAMV,GAAX,IAAkBU,GAAlB,EAAuB;AACnB,QAAIA,GAAG,CAACV,GAAD,CAAH,KAAaW,SAAjB,EAA4B;AACxB,aAAOD,GAAG,CAACV,GAAD,CAAV;AACH;AACJ;;AACD,SAAOU,GAAP;AACH;;ACJM,SAASE,KAAT,CAAeT,QAAf,EAAyBU,KAAzB,EAAgCT,OAAhC,EAAyC;AAC5C,MAAI,OAAOS,KAAP,KAAiB,QAArB,EAA+B;AAC3B,QAAI,CAACC,MAAD,EAASC,GAAT,IAAgBF,KAAK,CAACG,KAAN,CAAY,GAAZ,CAApB;AACAZ,IAAAA,OAAO,GAAGR,MAAM,CAACU,MAAP,CAAcS,GAAG,GAAG;AAAED,MAAAA,MAAF;AAAUC,MAAAA;AAAV,KAAH,GAAqB;AAAEA,MAAAA,GAAG,EAAED;AAAP,KAAtC,EAAuDV,OAAvD,CAAV;AACH,GAHD,MAIK;AACDA,IAAAA,OAAO,GAAGR,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBO,KAAlB,CAAV;AACH,GAP2C;;;AAS5CT,EAAAA,OAAO,CAACa,OAAR,GAAkBvB,aAAa,CAACU,OAAO,CAACa,OAAT,CAA/B,CAT4C;;AAW5CR,EAAAA,yBAAyB,CAACL,OAAD,CAAzB;AACAK,EAAAA,yBAAyB,CAACL,OAAO,CAACa,OAAT,CAAzB;AACA,QAAMC,aAAa,GAAGhB,SAAS,CAACC,QAAQ,IAAI,EAAb,EAAiBC,OAAjB,CAA/B,CAb4C;;AAe5C,MAAID,QAAQ,IAAIA,QAAQ,CAACgB,SAAT,CAAmBC,QAAnB,CAA4BC,MAA5C,EAAoD;AAChDH,IAAAA,aAAa,CAACC,SAAd,CAAwBC,QAAxB,GAAmCjB,QAAQ,CAACgB,SAAT,CAAmBC,QAAnB,CAC9BE,MAD8B,CACtBC,OAAD,IAAa,CAACL,aAAa,CAACC,SAAd,CAAwBC,QAAxB,CAAiCI,QAAjC,CAA0CD,OAA1C,CADS,EAE9BE,MAF8B,CAEvBP,aAAa,CAACC,SAAd,CAAwBC,QAFD,CAAnC;AAGH;;AACDF,EAAAA,aAAa,CAACC,SAAd,CAAwBC,QAAxB,GAAmCF,aAAa,CAACC,SAAd,CAAwBC,QAAxB,CAAiCM,GAAjC,CAAsCH,OAAD,IAAaA,OAAO,CAACI,OAAR,CAAgB,UAAhB,EAA4B,EAA5B,CAAlD,CAAnC;AACA,SAAOT,aAAP;AACH;;ACzBM,SAASU,kBAAT,CAA4Bb,GAA5B,EAAiCc,UAAjC,EAA6C;AAChD,QAAMC,SAAS,GAAG,KAAKC,IAAL,CAAUhB,GAAV,IAAiB,GAAjB,GAAuB,GAAzC;AACA,QAAMiB,KAAK,GAAGpC,MAAM,CAACC,IAAP,CAAYgC,UAAZ,CAAd;;AACA,MAAIG,KAAK,CAACX,MAAN,KAAiB,CAArB,EAAwB;AACpB,WAAON,GAAP;AACH;;AACD,SAAQA,GAAG,GACPe,SADI,GAEJE,KAAK,CACAN,GADL,CACUO,IAAD,IAAU;AACf,QAAIA,IAAI,KAAK,GAAb,EAAkB;AACd,aAAQ,OAAOJ,UAAU,CAACK,CAAX,CAAalB,KAAb,CAAmB,GAAnB,EAAwBU,GAAxB,CAA4BS,kBAA5B,EAAgDC,IAAhD,CAAqD,GAArD,CAAf;AACH;;AACD,WAAQ,GAAEH,IAAK,IAAGE,kBAAkB,CAACN,UAAU,CAACI,IAAD,CAAX,CAAmB,EAAvD;AACH,GAND,EAOKG,IAPL,CAOU,GAPV,CAFJ;AAUH;;AChBD,MAAMC,gBAAgB,GAAG,YAAzB;;AACA,SAASC,cAAT,CAAwBC,YAAxB,EAAsC;AAClC,SAAOA,YAAY,CAACZ,OAAb,CAAqB,YAArB,EAAmC,EAAnC,EAAuCX,KAAvC,CAA6C,GAA7C,CAAP;AACH;;AACD,AAAO,SAASwB,uBAAT,CAAiCzB,GAAjC,EAAsC;AACzC,QAAM0B,OAAO,GAAG1B,GAAG,CAAC2B,KAAJ,CAAUL,gBAAV,CAAhB;;AACA,MAAI,CAACI,OAAL,EAAc;AACV,WAAO,EAAP;AACH;;AACD,SAAOA,OAAO,CAACf,GAAR,CAAYY,cAAZ,EAA4BxC,MAA5B,CAAmC,CAAC6C,CAAD,EAAIC,CAAJ,KAAUD,CAAC,CAAClB,MAAF,CAASmB,CAAT,CAA7C,EAA0D,EAA1D,CAAP;AACH;;ACVM,SAASC,IAAT,CAAclD,MAAd,EAAsBmD,UAAtB,EAAkC;AACrC,SAAOlD,MAAM,CAACC,IAAP,CAAYF,MAAZ,EACF2B,MADE,CACMyB,MAAD,IAAY,CAACD,UAAU,CAACtB,QAAX,CAAoBuB,MAApB,CADlB,EAEFjD,MAFE,CAEK,CAACY,GAAD,EAAMV,GAAN,KAAc;AACtBU,IAAAA,GAAG,CAACV,GAAD,CAAH,GAAWL,MAAM,CAACK,GAAD,CAAjB;AACA,WAAOU,GAAP;AACH,GALM,EAKJ,EALI,CAAP;AAMH;;ACPD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AACA;AACA,SAASsC,cAAT,CAAwBC,GAAxB,EAA6B;AACzB,SAAOA,GAAG,CACLjC,KADE,CACI,oBADJ,EAEFU,GAFE,CAEE,UAAUwB,IAAV,EAAgB;AACrB,QAAI,CAAC,eAAenB,IAAf,CAAoBmB,IAApB,CAAL,EAAgC;AAC5BA,MAAAA,IAAI,GAAGC,SAAS,CAACD,IAAD,CAAT,CAAgBvB,OAAhB,CAAwB,MAAxB,EAAgC,GAAhC,EAAqCA,OAArC,CAA6C,MAA7C,EAAqD,GAArD,CAAP;AACH;;AACD,WAAOuB,IAAP;AACH,GAPM,EAQFd,IARE,CAQG,EARH,CAAP;AASH;;AACD,SAASgB,gBAAT,CAA0BH,GAA1B,EAA+B;AAC3B,SAAOd,kBAAkB,CAACc,GAAD,CAAlB,CAAwBtB,OAAxB,CAAgC,UAAhC,EAA4C,UAAU0B,CAAV,EAAa;AAC5D,WAAO,MAAMA,CAAC,CAACC,UAAF,CAAa,CAAb,EAAgBC,QAAhB,CAAyB,EAAzB,EAA6BC,WAA7B,EAAb;AACH,GAFM,CAAP;AAGH;;AACD,SAASC,WAAT,CAAqBC,QAArB,EAA+BC,KAA/B,EAAsC3D,GAAtC,EAA2C;A
ACvC2D,EAAAA,KAAK,GACDD,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAjC,GACMV,cAAc,CAACW,KAAD,CADpB,GAEMP,gBAAgB,CAACO,KAAD,CAH1B;;AAIA,MAAI3D,GAAJ,EAAS;AACL,WAAOoD,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAAxB,GAA8B2D,KAArC;AACH,GAFD,MAGK;AACD,WAAOA,KAAP;AACH;AACJ;;AACD,SAASC,SAAT,CAAmBD,KAAnB,EAA0B;AACtB,SAAOA,KAAK,KAAKhD,SAAV,IAAuBgD,KAAK,KAAK,IAAxC;AACH;;AACD,SAASE,aAAT,CAAuBH,QAAvB,EAAiC;AAC7B,SAAOA,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAjC,IAAwCA,QAAQ,KAAK,GAA5D;AACH;;AACD,SAASI,SAAT,CAAmBC,OAAnB,EAA4BL,QAA5B,EAAsC1D,GAAtC,EAA2CgE,QAA3C,EAAqD;AACjD,MAAIL,KAAK,GAAGI,OAAO,CAAC/D,GAAD,CAAnB;AAAA,MAA0BK,MAAM,GAAG,EAAnC;;AACA,MAAIuD,SAAS,CAACD,KAAD,CAAT,IAAoBA,KAAK,KAAK,EAAlC,EAAsC;AAClC,QAAI,OAAOA,KAAP,KAAiB,QAAjB,IACA,OAAOA,KAAP,KAAiB,QADjB,IAEA,OAAOA,KAAP,KAAiB,SAFrB,EAEgC;AAC5BA,MAAAA,KAAK,GAAGA,KAAK,CAACJ,QAAN,EAAR;;AACA,UAAIS,QAAQ,IAAIA,QAAQ,KAAK,GAA7B,EAAkC;AAC9BL,QAAAA,KAAK,GAAGA,KAAK,CAACM,SAAN,CAAgB,CAAhB,EAAmBC,QAAQ,CAACF,QAAD,EAAW,EAAX,CAA3B,CAAR;AACH;;AACD3D,MAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAX,EAAkBE,aAAa,CAACH,QAAD,CAAb,GAA0B1D,GAA1B,GAAgC,EAAlD,CAAvB;AACH,KARD,MASK;AACD,UAAIgE,QAAQ,KAAK,GAAjB,EAAsB;AAClB,YAAII,KAAK,CAACC,OAAN,CAAcV,KAAd,CAAJ,EAA0B;AACtBA,UAAAA,KAAK,CAACrC,MAAN,CAAasC,SAAb,EAAwBrD,OAAxB,CAAgC,UAAUoD,KAAV,EAAiB;AAC7CtD,YAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAX,EAAkBE,aAAa,CAACH,QAAD,CAAb,GAA0B1D,GAA1B,GAAgC,EAAlD,CAAvB;AACH,WAFD;AAGH,SAJD,MAKK;AACDJ,UAAAA,MAAM,CAACC,IAAP,CAAY8D,KAAZ,EAAmBpD,OAAnB,CAA2B,UAAU+D,CAAV,EAAa;AACpC,gBAAIV,SAAS,CAACD,KAAK,CAACW,CAAD,CAAN,CAAb,EAAyB;AACrBjE,cAAAA,MAAM,CAAC8D,IAAP,CAAYV,WAAW,CAACC,QAAD,EAAWC,KAAK,CAACW,CAAD,CAAhB,EAAqBA,CAArB,CAAvB;AACH;AACJ,WAJD;AAKH;AACJ,OAbD,MAcK;AACD,cAAMC,GAAG,GAAG,EAAZ;;AACA,YAAIH,KAAK,CAACC,OAAN,CAAcV,KAAd,CAAJ,EAA0B;AACtBA,UAAAA,KAAK,CAACrC,MAAN,CAAasC,SAAb,EAAwBrD,OAAxB,CAAgC,UAAUoD,KAAV,EAAiB;AAC7CY,YAAAA,GAAG,CAACJ,IAAJ,CAASV,WAAW,CAACC,QAAD,EAAWC,KAAX,CAApB;AACH,WAFD;AAGH,SAJD,MAKK;AACD/D,UAAAA,MAAM,CAACC,IAAP,CAAY8D,KAAZ,EAAmBpD,OAAnB,CAA2B,UAAU+D,CAAV,EAAa;AACpC,gBAAIV,SAAS,CAACD,KAAK,CAACW,CAAD,CAAN,CAAb,EAAyB;AACrBC,cAAAA,GAAG,CAACJ,IAAJ,CAASf,gBAAgB,CAACkB,CAAD,CAAzB;AACAC,cAAAA,GAAG,CAACJ,IAAJ,CAASV,WAAW,CAACC,QAAD,EAAWC,KAAK,CAACW,CAAD,CAAL,CAASf,QAAT,EAAX,CAApB;AACH;AACJ,WALD;AAMH;;AACD,YAAIM,aAAa,CAACH,QAAD,CAAjB,EAA6B;AACzBrD,UAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAAxB,GAA8BuE,GAAG,CAACnC,IAAJ,CAAS,GAAT,CAA1C;AACH,SAFD,MAGK,IAAImC,GAAG,CAAClD,MAAJ,KAAe,CAAnB,EAAsB;AACvBhB,UAAAA,MAAM,CAAC8D,IAAP,CAAYI,GAAG,CAACnC,IAAJ,CAAS,GAAT,CAAZ;AACH;AACJ;AACJ;AACJ,GAhDD,MAiDK;AACD,QAAIsB,QAAQ,KAAK,GAAjB,EAAsB;AAClB,UAAIE,SAAS,CAACD,KAAD,CAAb,EAAsB;AAClBtD,QAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAA5B;AACH;AACJ,KAJD,MAKK,IAAI2D,KAAK,KAAK,EAAV,KAAiBD,QAAQ,KAAK,GAAb,IAAoBA,QAAQ,KAAK,GAAlD,CAAJ,EAA4D;AAC7DrD,MAAAA,MAAM,CAAC8D,IAAP,CAAYf,gBAAgB,CAACpD,GAAD,CAAhB,GAAwB,GAApC;AACH,KAFI,MAGA,IAAI2D,KAAK,KAAK,EAAd,EAAkB;AACnBtD,MAAAA,MAAM,CAAC8D,IAAP,CAAY,EAAZ;AACH;AACJ;;AACD,SAAO9D,MAAP;AACH;;AACD,AAAO,SAASmE,QAAT,CAAkBC,QAAlB,EAA4B;AAC/B,SAAO;AACHC,IAAAA,MAAM,EAAEA,MAAM,CAACC,IAAP,CAAY,IAAZ,EAAkBF,QAAlB;AADL,GAAP;AAGH;;AACD,SAASC,MAAT,CAAgBD,QAAhB,EAA0BV,OAA1B,EAAmC;AAC/B,MAAIa,SAAS,GAAG,CAAC,GAAD,EAAM,GAAN,EAAW,GAAX,EAAgB,GAAhB,EAAqB,GAArB,EAA0B,GAA1B,EAA+B,GAA/B,CAAhB;AACA,SAAOH,QAAQ,CAAC9C,OAAT,CAAiB,4BAAjB,EAA+C,UAAUkD,CAAV,EAAaC,UAAb,EAAyBC,OAAzB,EAAkC;AACpF,QAAID,UAAJ,EAAgB;AACZ,UAAIpB,QAAQ,GAAG,EAAf;AACA,YAAMsB,MAAM,GAAG,EAAf;;AACA,UAAIJ,SAAS,CAACK,OAAV,CAAkBH,UAAU,CAACI,MAAX,CAAkB,CAAlB,CAAlB,MAA4C,CAAC,CAAjD,EAAoD;AAChDxB,QAAAA,QAAQ,GAAGoB,UAAU,CAACI,M
AAX,CAAkB,CAAlB,CAAX;AACAJ,QAAAA,UAAU,GAAGA,UAAU,CAACK,MAAX,CAAkB,CAAlB,CAAb;AACH;;AACDL,MAAAA,UAAU,CAAC9D,KAAX,CAAiB,IAAjB,EAAuBT,OAAvB,CAA+B,UAAU6E,QAAV,EAAoB;AAC/C,YAAIb,GAAG,GAAG,4BAA4Bc,IAA5B,CAAiCD,QAAjC,CAAV;AACAJ,QAAAA,MAAM,CAACb,IAAP,CAAYL,SAAS,CAACC,OAAD,EAAUL,QAAV,EAAoBa,GAAG,CAAC,CAAD,CAAvB,EAA4BA,GAAG,CAAC,CAAD,CAAH,IAAUA,GAAG,CAAC,CAAD,CAAzC,CAArB;AACH,OAHD;;AAIA,UAAIb,QAAQ,IAAIA,QAAQ,KAAK,GAA7B,EAAkC;AAC9B,YAAI5B,SAAS,GAAG,GAAhB;;AACA,YAAI4B,QAAQ,KAAK,GAAjB,EAAsB;AAClB5B,UAAAA,SAAS,GAAG,GAAZ;AACH,SAFD,MAGK,IAAI4B,QAAQ,KAAK,GAAjB,EAAsB;AACvB5B,UAAAA,SAAS,GAAG4B,QAAZ;AACH;;AACD,eAAO,CAACsB,MAAM,CAAC3D,MAAP,KAAkB,CAAlB,GAAsBqC,QAAtB,GAAiC,EAAlC,IAAwCsB,MAAM,CAAC5C,IAAP,CAAYN,SAAZ,CAA/C;AACH,OATD,MAUK;AACD,eAAOkD,MAAM,CAAC5C,IAAP,CAAY,GAAZ,CAAP;AACH;AACJ,KAxBD,MAyBK;AACD,aAAOY,cAAc,CAAC+B,OAAD,CAArB;AACH;AACJ,GA7BM,CAAP;AA8BH;;AC/JM,SAASO,KAAT,CAAelF,OAAf,EAAwB;AAC3B;AACA,MAAIU,MAAM,GAAGV,OAAO,CAACU,MAAR,CAAe0C,WAAf,EAAb,CAF2B;;AAI3B,MAAIzC,GAAG,GAAG,CAACX,OAAO,CAACW,GAAR,IAAe,GAAhB,EAAqBY,OAArB,CAA6B,cAA7B,EAA6C,MAA7C,CAAV;AACA,MAAIV,OAAO,GAAGrB,MAAM,CAACU,MAAP,CAAc,EAAd,EAAkBF,OAAO,CAACa,OAA1B,CAAd;AACA,MAAIsE,IAAJ;AACA,MAAI1D,UAAU,GAAGgB,IAAI,CAACzC,OAAD,EAAU,CAC3B,QAD2B,EAE3B,SAF2B,EAG3B,KAH2B,EAI3B,SAJ2B,EAK3B,SAL2B,EAM3B,WAN2B,CAAV,CAArB,CAP2B;;AAgB3B,QAAMoF,gBAAgB,GAAGhD,uBAAuB,CAACzB,GAAD,CAAhD;AACAA,EAAAA,GAAG,GAAGyD,QAAQ,CAACzD,GAAD,CAAR,CAAc2D,MAAd,CAAqB7C,UAArB,CAAN;;AACA,MAAI,CAAC,QAAQE,IAAR,CAAahB,GAAb,CAAL,EAAwB;AACpBA,IAAAA,GAAG,GAAGX,OAAO,CAACqF,OAAR,GAAkB1E,GAAxB;AACH;;AACD,QAAM2E,iBAAiB,GAAG9F,MAAM,CAACC,IAAP,CAAYO,OAAZ,EACrBkB,MADqB,CACbyB,MAAD,IAAYyC,gBAAgB,CAAChE,QAAjB,CAA0BuB,MAA1B,CADE,EAErBtB,MAFqB,CAEd,SAFc,CAA1B;AAGA,QAAMkE,mBAAmB,GAAG9C,IAAI,CAAChB,UAAD,EAAa6D,iBAAb,CAAhC;AACA,QAAME,eAAe,GAAG,6BAA6B7D,IAA7B,CAAkCd,OAAO,CAAC4E,MAA1C,CAAxB;;AACA,MAAI,CAACD,eAAL,EAAsB;AAClB,QAAIxF,OAAO,CAACe,SAAR,CAAkB2E,MAAtB,EAA8B;AAC1B;AACA7E,MAAAA,OAAO,CAAC4E,MAAR,GAAiB5E,OAAO,CAAC4E,MAAR,CACZ7E,KADY,CACN,GADM,EAEZU,GAFY,CAEPH,OAAD,IAAaA,OAAO,CAACI,OAAR,CAAgB,kDAAhB,EAAqE,uBAAsBvB,OAAO,CAACe,SAAR,CAAkB2E,MAAO,EAApH,CAFL,EAGZ1D,IAHY,CAGP,GAHO,CAAjB;AAIH;;AACD,QAAIhC,OAAO,CAACe,SAAR,CAAkBC,QAAlB,CAA2BC,MAA/B,EAAuC;AACnC,YAAM0E,wBAAwB,GAAG9E,OAAO,CAAC4E,MAAR,CAAenD,KAAf,CAAqB,qBAArB,KAA+C,EAAhF;AACAzB,MAAAA,OAAO,CAAC4E,MAAR,GAAiBE,wBAAwB,CACpCtE,MADY,CACLrB,OAAO,CAACe,SAAR,CAAkBC,QADb,EAEZM,GAFY,CAEPH,OAAD,IAAa;AAClB,cAAMuE,MAAM,GAAG1F,OAAO,CAACe,SAAR,CAAkB2E,MAAlB,GACR,IAAG1F,OAAO,CAACe,SAAR,CAAkB2E,MAAO,EADpB,GAET,OAFN;AAGA,eAAQ,0BAAyBvE,OAAQ,WAAUuE,MAAO,EAA1D;AACH,OAPgB,EAQZ1D,IARY,CAQP,GARO,CAAjB;AASH;AACJ,GA9C0B;AAgD3B;;;AACA,MAAI,CAAC,KAAD,EAAQ,MAAR,EAAgBZ,QAAhB,CAAyBV,MAAzB,CAAJ,EAAsC;AAClCC,IAAAA,GAAG,GAAGa,kBAAkB,CAACb,GAAD,EAAM4E,mBAAN,CAAxB;AACH,GAFD,MAGK;AACD,QAAI,UAAUA,mBAAd,EAAmC;AAC/BJ,MAAAA,IAAI,GAAGI,mBAAmB,CAACK,IAA3B;AACH,KAFD,MAGK;AACD,UAAIpG,MAAM,CAACC,IAAP,CAAY8F,mBAAZ,EAAiCtE,MAArC,EAA6C;AACzCkE,QAAAA,IAAI,GAAGI,mBAAP;AACH,OAFD,MAGK;AACD1E,QAAAA,OAAO,CAAC,gBAAD,CAAP,GAA4B,CAA5B;AACH;AACJ;AACJ,GAhE0B;;;AAkE3B,MAAI,CAACA,OAAO,CAAC,cAAD,CAAR,IAA4B,OAAOsE,IAAP,KAAgB,WAAhD,EAA6D;AACzDtE,IAAAA,OAAO,CAAC,cAAD,CAAP,GAA0B,iCAA1B;AACH,GApE0B;AAsE3B;;;AACA,MAAI,CAAC,OAAD,EAAU,KAAV,EAAiBO,QAAjB,CAA0BV,MAA1B,KAAqC,OAAOyE,IAAP,KAAgB,WAAzD,EAAsE;AAClEA,IAAAA,IAAI,GAAG,EAAP;AACH,GAzE0B;;;AA2E3B,SAAO3F,MAAM,CAACU,MAAP,CAAc;AAAEQ,IAAAA,MAAF;AAAUC,IAAAA,GAAV;AAAeE,IAAAA;AAAf,GAAd,EAAwC,OAAOsE,IAAP,KAAgB,WAAhB,GAA8B;AAAEA,IAAAA;AAAF,GAA9B,GAAyC,IAAjF,EAAuFnF,OAAO,CAAC6F,OAAR,GAAkB;AAAEA,IAAAA,OAAO,EAAE7F,OAAO,CAAC6F;AAAnB,GAAlB,GAAiD,IAAxI,CAAP;AACH;;AC9EM,SAASC,oBAAT
,CAA8B/F,QAA9B,EAAwCU,KAAxC,EAA+CT,OAA/C,EAAwD;AAC3D,SAAOkF,KAAK,CAAC1E,KAAK,CAACT,QAAD,EAAWU,KAAX,EAAkBT,OAAlB,CAAN,CAAZ;AACH;;ACDM,SAAS+F,YAAT,CAAsBC,WAAtB,EAAmCC,WAAnC,EAAgD;AACnD,QAAMC,QAAQ,GAAG1F,KAAK,CAACwF,WAAD,EAAcC,WAAd,CAAtB;AACA,QAAME,QAAQ,GAAGL,oBAAoB,CAACvB,IAArB,CAA0B,IAA1B,EAAgC2B,QAAhC,CAAjB;AACA,SAAO1G,MAAM,CAACU,MAAP,CAAciG,QAAd,EAAwB;AAC3BD,IAAAA,QAD2B;AAE3BnG,IAAAA,QAAQ,EAAEgG,YAAY,CAACxB,IAAb,CAAkB,IAAlB,EAAwB2B,QAAxB,CAFiB;AAG3B1F,IAAAA,KAAK,EAAEA,KAAK,CAAC+D,IAAN,CAAW,IAAX,EAAiB2B,QAAjB,CAHoB;AAI3BhB,IAAAA;AAJ2B,GAAxB,CAAP;AAMH;;ACZM,MAAMkB,OAAO,GAAG,mBAAhB;;ACEP,MAAMC,SAAS,GAAI,uBAAsBD,OAAQ,IAAGE,+BAAY,EAAG,EAAnE;AAEA;;AACA,AAAO,MAAMJ,QAAQ,GAAG;AACpBxF,EAAAA,MAAM,EAAE,KADY;AAEpB2E,EAAAA,OAAO,EAAE,wBAFW;AAGpBxE,EAAAA,OAAO,EAAE;AACL4E,IAAAA,MAAM,EAAE,gCADH;AAEL,kBAAcY;AAFT,GAHW;AAOpBtF,EAAAA,SAAS,EAAE;AACP2E,IAAAA,MAAM,EAAE,EADD;AAEP1E,IAAAA,QAAQ,EAAE;AAFH;AAPS,CAAjB;;MCHMmF,QAAQ,GAAGJ,YAAY,CAAC,IAAD,EAAOG,QAAP,CAA7B;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/endpoint/dist-src/version.js b/node_modules/@octokit/endpoint/dist-src/version.js index 253f86ae..930e2557 100644 --- a/node_modules/@octokit/endpoint/dist-src/version.js +++ b/node_modules/@octokit/endpoint/dist-src/version.js @@ -1 +1 @@ -export const VERSION = "6.0.11"; +export const VERSION = "6.0.12"; diff --git a/node_modules/@octokit/endpoint/dist-types/version.d.ts b/node_modules/@octokit/endpoint/dist-types/version.d.ts index fb46c4c5..330d47ae 100644 --- a/node_modules/@octokit/endpoint/dist-types/version.d.ts +++ b/node_modules/@octokit/endpoint/dist-types/version.d.ts @@ -1 +1 @@ -export declare const VERSION = "6.0.11"; +export declare const VERSION = "6.0.12"; diff --git a/node_modules/@octokit/endpoint/dist-web/index.js b/node_modules/@octokit/endpoint/dist-web/index.js index b6ef5632..e1521639 100644 --- a/node_modules/@octokit/endpoint/dist-web/index.js +++ b/node_modules/@octokit/endpoint/dist-web/index.js @@ -357,7 +357,7 @@ function withDefaults(oldDefaults, newDefaults) { }); } -const VERSION = "6.0.11"; +const VERSION = "6.0.12"; const userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. 
diff --git a/node_modules/@octokit/endpoint/dist-web/index.js.map b/node_modules/@octokit/endpoint/dist-web/index.js.map index b98ae12d..1d60d026 100644 --- a/node_modules/@octokit/endpoint/dist-web/index.js.map +++ b/node_modules/@octokit/endpoint/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/util/lowercase-keys.js","../dist-src/util/merge-deep.js","../dist-src/util/remove-undefined-properties.js","../dist-src/merge.js","../dist-src/util/add-query-parameters.js","../dist-src/util/extract-url-variable-names.js","../dist-src/util/omit.js","../dist-src/util/url-template.js","../dist-src/parse.js","../dist-src/endpoint-with-defaults.js","../dist-src/with-defaults.js","../dist-src/version.js","../dist-src/defaults.js","../dist-src/index.js"],"sourcesContent":["export function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n","import { isPlainObject } from \"is-plain-object\";\nexport function mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n }\n else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n","export function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n","import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nexport function merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n }\n else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews\n .filter((preview) => !mergedOptions.mediaType.previews.includes(preview))\n .concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n","export function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return (url +\n separator +\n names\n .map((name) => {\n if (name === \"q\") {\n return (\"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\"));\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n })\n .join(\"&\"));\n}\n","const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nexport function extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n","export function omit(object, keysToOmit) {\n return Object.keys(object)\n .filter((option) => !keysToOmit.includes(option))\n .reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n","// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str\n .split(/(%[0-9A-Fa-f]{2})/g)\n .map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n })\n .join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value =\n operator === \"+\" || operator === \"#\"\n ? 
encodeReserved(value)\n : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n }\n else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n }\n else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n }\n else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n }\n else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n }\n else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n }\n else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n }\n else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nexport function parseUrl(template) {\n return {\n expand: expand.bind(null, template),\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n }\n else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n }\n else {\n return values.join(\",\");\n }\n }\n else {\n return encodeReserved(literal);\n }\n });\n}\n","import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nexport function parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\",\n ]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options)\n .filter((option) => urlVariableNames.includes(option))\n .concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept\n .split(/,/)\n .map((preview) => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`))\n .join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader\n .concat(options.mediaType.previews)\n .map((preview) => {\n const format = options.mediaType.format\n ? `.${options.mediaType.format}`\n : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n })\n .join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n }\n else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n }\n else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n else {\n headers[\"content-length\"] = 0;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({ method, url, headers }, typeof body !== \"undefined\" ? { body } : null, options.request ? 
{ request: options.request } : null);\n}\n","import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n","import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse,\n });\n}\n","export const VERSION = \"6.0.11\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nexport const DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent,\n },\n mediaType: {\n format: \"\",\n previews: [],\n },\n};\n","import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nexport const endpoint = withDefaults(null, DEFAULTS);\n"],"names":[],"mappings":";;;AAAO,SAAS,aAAa,CAAC,MAAM,EAAE;AACtC,IAAI,IAAI,CAAC,MAAM,EAAE;AACjB,QAAQ,OAAO,EAAE,CAAC;AAClB,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK;AACvD,QAAQ,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;AAChD,QAAQ,OAAO,MAAM,CAAC;AACtB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX;;ACPO,SAAS,SAAS,CAAC,QAAQ,EAAE,OAAO,EAAE;AAC7C,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;AAC/C,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,KAAK;AAC1C,QAAQ,IAAI,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE;AACzC,YAAY,IAAI,EAAE,GAAG,IAAI,QAAQ,CAAC;AAClC,gBAAgB,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAC/D;AACA,gBAAgB,MAAM,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;AACrE,SAAS;AACT,aAAa;AACb,YAAY,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAC3D,SAAS;AACT,KAAK,CAAC,CAAC;AACP,IAAI,OAAO,MAAM,CAAC;AAClB,CAAC;;ACfM,SAAS,yBAAyB,CAAC,GAAG,EAAE;AAC/C,IAAI,KAAK,MAAM,GAAG,IAAI,GAAG,EAAE;AAC3B,QAAQ,IAAI,GAAG,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;AACpC,YAAY,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;AAC5B,SAAS;AACT,KAAK;AACL,IAAI,OAAO,GAAG,CAAC;AACf,CAAC;;ACJM,SAAS,KAAK,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE;AAChD,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACnC,QAAQ,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C,QAAQ,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC;AAClF,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;AAC3C,KAAK;AACL;AACA,IAAI,OAAO,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AACrD;AACA,IAAI,yBAAyB,CAAC,OAAO,CAAC,CAAC;AACvC,IAAI,yBAAyB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/C,IAAI,MAAM,aAAa,GAAG,SAAS,CAAC,QAAQ,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;AAC7D;AACA,IAAI,IAAI,QAAQ,IAAI,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE;AACxD,QAAQ,aAAa,CAAC,SAAS,CAAC,QAAQ,GAAG,QAAQ,CAAC,SAAS,CAAC,QAAQ;AACtE,aAAa,MAAM,CAAC,CAAC,OAAO,KAAK,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACrF,
aAAa,MAAM,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;AACtD,KAAK;AACL,IAAI,aAAa,CAAC,SAAS,CAAC,QAAQ,GAAG,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC,CAAC;AAC1H,IAAI,OAAO,aAAa,CAAC;AACzB,CAAC;;ACzBM,SAAS,kBAAkB,CAAC,GAAG,EAAE,UAAU,EAAE;AACpD,IAAI,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC;AACjD,IAAI,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1C,IAAI,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5B,QAAQ,OAAO,GAAG,CAAC;AACnB,KAAK;AACL,IAAI,QAAQ,GAAG;AACf,QAAQ,SAAS;AACjB,QAAQ,KAAK;AACb,aAAa,GAAG,CAAC,CAAC,IAAI,KAAK;AAC3B,YAAY,IAAI,IAAI,KAAK,GAAG,EAAE;AAC9B,gBAAgB,QAAQ,IAAI,GAAG,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAC1F,aAAa;AACb,YAAY,OAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAAE,kBAAkB,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrE,SAAS,CAAC;AACV,aAAa,IAAI,CAAC,GAAG,CAAC,EAAE;AACxB,CAAC;;AChBD,MAAM,gBAAgB,GAAG,YAAY,CAAC;AACtC,SAAS,cAAc,CAAC,YAAY,EAAE;AACtC,IAAI,OAAO,YAAY,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7D,CAAC;AACD,AAAO,SAAS,uBAAuB,CAAC,GAAG,EAAE;AAC7C,IAAI,MAAM,OAAO,GAAG,GAAG,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;AAChD,IAAI,IAAI,CAAC,OAAO,EAAE;AAClB,QAAQ,OAAO,EAAE,CAAC;AAClB,KAAK;AACL,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AACzE,CAAC;;ACVM,SAAS,IAAI,CAAC,MAAM,EAAE,UAAU,EAAE;AACzC,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;AAC9B,SAAS,MAAM,CAAC,CAAC,MAAM,KAAK,CAAC,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AACzD,SAAS,MAAM,CAAC,CAAC,GAAG,EAAE,GAAG,KAAK;AAC9B,QAAQ,GAAG,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;AAC/B,QAAQ,OAAO,GAAG,CAAC;AACnB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX,CAAC;;ACPD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS,cAAc,CAAC,GAAG,EAAE;AAC7B,IAAI,OAAO,GAAG;AACd,SAAS,KAAK,CAAC,oBAAoB,CAAC;AACpC,SAAS,GAAG,CAAC,UAAU,IAAI,EAAE;AAC7B,QAAQ,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACxC,YAAY,IAAI,GAAG,SAAS,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAC7E,SAAS;AACT,QAAQ,OAAO,IAAI,CAAC;AACpB,KAAK,CAAC;AACN,SAAS,IAAI,CAAC,EAAE,CAAC,CAAC;AAClB,CAAC;AACD,SAAS,gBAAgB,CAAC,GAAG,EAAE;AAC/B,IAAI,OAAO,kBAAkB,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,UAAU,CAAC,EAAE;AACpE,QAAQ,OAAO,GAAG,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;AAChE,KAAK,CAAC,CAAC;AACP,CAAC;AACD,SAAS,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,EAAE;AAC3C,IAAI,KAAK;AACT,QAAQ,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG;AAC5C,cAAc,cAAc,CAAC,KAAK,CAAC;AACnC,cAAc,gBAAgB,CAAC,KAAK,CAAC,CAAC;AACtC,IAAI,IAAI,GAAG,EAAE;AACb,QAAQ,OAAO,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,KAAK,CAAC;AACnD,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,KAAK,CAAC;AACrB,KAAK;AACL,CAAC;AACD,SAAS,SAAS,CAAC,KAAK,EAAE;AAC1B,IAAI,OAAO,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,CAAC;AACjD,CAAC;AACD,SAAS,aAAa,CAAC,QAAQ,EAAE;AACjC,IAAI,OAAO,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,CAAC;AACpE,CAAC;AACD,SAAS,SAAS,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,EAAE,QAAQ,EAAE;AACrD,IAAI,IAAI,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAC1C,IAAI,IAAI,SAAS,CAAC,KAAK,CAAC,IAAI,KAAK,KAAK,EAAE,EAAE;AAC1C,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ;AACrC,YAAY,OAAO,KAAK,KAAK,QAAQ;AACrC,YAAY,OAAO,KAAK,KAAK,SAAS,EAAE;AACxC,YAAY,KAAK,GAAG,KAAK,CAAC,QAAQ,EAAE,CAAC;AACrC,YAAY,IAAI,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9C,gBAAgB,KAAK,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,CAAC;AACnE,aAAa;AACb,YAAY,M
AAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AAC1F,SAAS;AACT,aAAa;AACb,YAAY,IAAI,QAAQ,KAAK,GAAG,EAAE;AAClC,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AAC1C,oBAAoB,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAU,KAAK,EAAE;AACrE,wBAAwB,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AACtG,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,qBAAqB;AACrB,oBAAoB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;AAC5D,wBAAwB,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,4BAA4B,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC5E,yBAAyB;AACzB,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,aAAa;AACb,iBAAiB;AACjB,gBAAgB,MAAM,GAAG,GAAG,EAAE,CAAC;AAC/B,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AAC1C,oBAAoB,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAU,KAAK,EAAE;AACrE,wBAAwB,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;AAC/D,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,qBAAqB;AACrB,oBAAoB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;AAC5D,wBAAwB,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,4BAA4B,GAAG,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,4BAA4B,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;AACjF,yBAAyB;AACzB,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,gBAAgB,IAAI,aAAa,CAAC,QAAQ,CAAC,EAAE;AAC7C,oBAAoB,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,iBAAiB;AACjB,qBAAqB,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;AAC3C,oBAAoB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/C,iBAAiB;AACjB,aAAa;AACb,SAAS;AACT,KAAK;AACL,SAAS;AACT,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9B,YAAY,IAAI,SAAS,CAAC,KAAK,CAAC,EAAE;AAClC,gBAAgB,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC;AACnD,aAAa;AACb,SAAS;AACT,aAAa,IAAI,KAAK,KAAK,EAAE,KAAK,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,CAAC,EAAE;AACzE,YAAY,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC;AACrD,SAAS;AACT,aAAa,IAAI,KAAK,KAAK,EAAE,EAAE;AAC/B,YAAY,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC5B,SAAS;AACT,KAAK;AACL,IAAI,OAAO,MAAM,CAAC;AAClB,CAAC;AACD,AAAO,SAAS,QAAQ,CAAC,QAAQ,EAAE;AACnC,IAAI,OAAO;AACX,QAAQ,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AAC3C,KAAK,CAAC;AACN,CAAC;AACD,SAAS,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE;AACnC,IAAI,IAAI,SAAS,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;AACxD,IAAI,OAAO,QAAQ,CAAC,OAAO,CAAC,4BAA4B,EAAE,UAAU,CAAC,EAAE,UAAU,EAAE,OAAO,EAAE;AAC5F,QAAQ,IAAI,UAAU,EAAE;AACxB,YAAY,IAAI,QAAQ,GAAG,EAAE,CAAC;AAC9B,YAAY,MAAM,MAAM,GAAG,EAAE,CAAC;AAC9B,YAAY,IAAI,SAAS,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AAChE,gBAAgB,QAAQ,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAChD,gBAAgB,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAClD,aAAa;AACb,YAAY,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,UAAU,QAAQ,EAAE;AAC/D,gBAAgB,IAAI,GAAG,GAAG,2BAA2B,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACrE,gBAAgB,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpF,aAAa,CAAC,CAAC;AACf,YAAY,IAAI,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9C,gBAAgB,IAAI,SAAS,GAAG,GAAG,CAAC;AACpC,gBAAgB,IAAI,QAAQ,KAAK,GAAG,EAAE;AACtC,oBAAoB,SAAS,GAAG,GAAG,CAAC;AACpC,iBAAiB;AACjB,qBAAqB,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC3C,oBAAoB,SAAS,GAAG,QAAQ,CAAC;AACzC,iBAAiB;AACjB,gBAAgB,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,QAAQ,GAAG,EAAE,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACtF,aAAa;AACb,i
BAAiB;AACjB,gBAAgB,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACxC,aAAa;AACb,SAAS;AACT,aAAa;AACb,YAAY,OAAO,cAAc,CAAC,OAAO,CAAC,CAAC;AAC3C,SAAS;AACT,KAAK,CAAC,CAAC;AACP,CAAC;;AC/JM,SAAS,KAAK,CAAC,OAAO,EAAE;AAC/B;AACA,IAAI,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC;AAC9C;AACA,IAAI,IAAI,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,IAAI,GAAG,EAAE,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;AACnE,IAAI,IAAI,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AACrD,IAAI,IAAI,IAAI,CAAC;AACb,IAAI,IAAI,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE;AACnC,QAAQ,QAAQ;AAChB,QAAQ,SAAS;AACjB,QAAQ,KAAK;AACb,QAAQ,SAAS;AACjB,QAAQ,SAAS;AACjB,QAAQ,WAAW;AACnB,KAAK,CAAC,CAAC;AACP;AACA,IAAI,MAAM,gBAAgB,GAAG,uBAAuB,CAAC,GAAG,CAAC,CAAC;AAC1D,IAAI,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;AAC3C,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAC5B,QAAQ,GAAG,GAAG,OAAO,CAAC,OAAO,GAAG,GAAG,CAAC;AACpC,KAAK;AACL,IAAI,MAAM,iBAAiB,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;AAClD,SAAS,MAAM,CAAC,CAAC,MAAM,KAAK,gBAAgB,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAC9D,SAAS,MAAM,CAAC,SAAS,CAAC,CAAC;AAC3B,IAAI,MAAM,mBAAmB,GAAG,IAAI,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAC;AACpE,IAAI,MAAM,eAAe,GAAG,4BAA4B,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;AAC9E,IAAI,IAAI,CAAC,eAAe,EAAE;AAC1B,QAAQ,IAAI,OAAO,CAAC,SAAS,CAAC,MAAM,EAAE;AACtC;AACA,YAAY,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM;AAC3C,iBAAiB,KAAK,CAAC,GAAG,CAAC;AAC3B,iBAAiB,GAAG,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,kDAAkD,EAAE,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AACzJ,iBAAiB,IAAI,CAAC,GAAG,CAAC,CAAC;AAC3B,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE;AAC/C,YAAY,MAAM,wBAAwB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,CAAC,IAAI,EAAE,CAAC;AAC/F,YAAY,OAAO,CAAC,MAAM,GAAG,wBAAwB;AACrD,iBAAiB,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,QAAQ,CAAC;AACnD,iBAAiB,GAAG,CAAC,CAAC,OAAO,KAAK;AAClC,gBAAgB,MAAM,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,MAAM;AACvD,sBAAsB,CAAC,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;AACpD,sBAAsB,OAAO,CAAC;AAC9B,gBAAgB,OAAO,CAAC,uBAAuB,EAAE,OAAO,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC;AAC5E,aAAa,CAAC;AACd,iBAAiB,IAAI,CAAC,GAAG,CAAC,CAAC;AAC3B,SAAS;AACT,KAAK;AACL;AACA;AACA,IAAI,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;AAC1C,QAAQ,GAAG,GAAG,kBAAkB,CAAC,GAAG,EAAE,mBAAmB,CAAC,CAAC;AAC3D,KAAK;AACL,SAAS;AACT,QAAQ,IAAI,MAAM,IAAI,mBAAmB,EAAE;AAC3C,YAAY,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;AAC5C,SAAS;AACT,aAAa;AACb,YAAY,IAAI,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE;AACzD,gBAAgB,IAAI,GAAG,mBAAmB,CAAC;AAC3C,aAAa;AACb,iBAAiB;AACjB,gBAAgB,OAAO,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC;AAC9C,aAAa;AACb,SAAS;AACT,KAAK;AACL;AACA,IAAI,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;AACjE,QAAQ,OAAO,CAAC,cAAc,CAAC,GAAG,iCAAiC,CAAC;AACpE,KAAK;AACL;AACA;AACA,IAAI,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;AAC1E,QAAQ,IAAI,GAAG,EAAE,CAAC;AAClB,KAAK;AACL;AACA,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,EAAE,OAAO,IAAI,KAAK,WAAW,GAAG,EAAE,IAAI,EAAE,GAAG,IAAI,EAAE,OAAO,CAAC,OAAO,GAAG,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AACzJ,CAAC;;AC9EM,SAAS,oBAAoB,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE;AAC/D,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC;AAClD,CAAC;;ACDM,SAAS,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE;AACvD,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC;AACrD,IAAI,MAAM,QAAQ,GAAG,oBAAoB,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;AAC/D,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;AACnC,QAAQ,QAAQ;AAChB,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACnD,QAAQ,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;A
ACzC,QAAQ,KAAK;AACb,KAAK,CAAC,CAAC;AACP,CAAC;;ACZM,MAAM,OAAO,GAAG,mBAAmB,CAAC;;ACE3C,MAAM,SAAS,GAAG,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC,CAAC;AACrE;AACA;AACA,AAAO,MAAM,QAAQ,GAAG;AACxB,IAAI,MAAM,EAAE,KAAK;AACjB,IAAI,OAAO,EAAE,wBAAwB;AACrC,IAAI,OAAO,EAAE;AACb,QAAQ,MAAM,EAAE,gCAAgC;AAChD,QAAQ,YAAY,EAAE,SAAS;AAC/B,KAAK;AACL,IAAI,SAAS,EAAE;AACf,QAAQ,MAAM,EAAE,EAAE;AAClB,QAAQ,QAAQ,EAAE,EAAE;AACpB,KAAK;AACL,CAAC,CAAC;;ACdU,MAAC,QAAQ,GAAG,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/util/lowercase-keys.js","../dist-src/util/merge-deep.js","../dist-src/util/remove-undefined-properties.js","../dist-src/merge.js","../dist-src/util/add-query-parameters.js","../dist-src/util/extract-url-variable-names.js","../dist-src/util/omit.js","../dist-src/util/url-template.js","../dist-src/parse.js","../dist-src/endpoint-with-defaults.js","../dist-src/with-defaults.js","../dist-src/version.js","../dist-src/defaults.js","../dist-src/index.js"],"sourcesContent":["export function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n","import { isPlainObject } from \"is-plain-object\";\nexport function mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n }\n else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\n","export function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n","import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nexport function merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n }\n else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews\n .filter((preview) => !mergedOptions.mediaType.previews.includes(preview))\n .concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map((preview) => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n","export function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return (url +\n separator +\n names\n .map((name) => {\n if (name === \"q\") {\n return (\"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\"));\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n })\n .join(\"&\"));\n}\n","const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nexport function extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n","export function omit(object, keysToOmit) {\n return Object.keys(object)\n .filter((option) => !keysToOmit.includes(option))\n .reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n","// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str\n .split(/(%[0-9A-Fa-f]{2})/g)\n .map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n })\n .join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value =\n operator === \"+\" || operator === \"#\"\n ? 
encodeReserved(value)\n : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n }\n else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" ||\n typeof value === \"number\" ||\n typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n }\n else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n }\n else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n }\n else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n }\n else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n }\n else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n }\n else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n }\n else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nexport function parseUrl(template) {\n return {\n expand: expand.bind(null, template),\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n }\n else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n }\n else {\n return values.join(\",\");\n }\n }\n else {\n return encodeReserved(literal);\n }\n });\n}\n","import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nexport function parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\",\n ]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options)\n .filter((option) => urlVariableNames.includes(option))\n .concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept\n .split(/,/)\n .map((preview) => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`))\n .join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader\n .concat(options.mediaType.previews)\n .map((preview) => {\n const format = options.mediaType.format\n ? `.${options.mediaType.format}`\n : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n })\n .join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n }\n else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n }\n else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n else {\n headers[\"content-length\"] = 0;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({ method, url, headers }, typeof body !== \"undefined\" ? { body } : null, options.request ? 
{ request: options.request } : null);\n}\n","import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n","import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nexport function withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse,\n });\n}\n","export const VERSION = \"6.0.12\";\n","import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nexport const DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent,\n },\n mediaType: {\n format: \"\",\n previews: [],\n },\n};\n","import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nexport const endpoint = withDefaults(null, DEFAULTS);\n"],"names":[],"mappings":";;;AAAO,SAAS,aAAa,CAAC,MAAM,EAAE;AACtC,IAAI,IAAI,CAAC,MAAM,EAAE;AACjB,QAAQ,OAAO,EAAE,CAAC;AAClB,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK;AACvD,QAAQ,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;AAChD,QAAQ,OAAO,MAAM,CAAC;AACtB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX;;ACPO,SAAS,SAAS,CAAC,QAAQ,EAAE,OAAO,EAAE;AAC7C,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;AAC/C,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,KAAK;AAC1C,QAAQ,IAAI,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE;AACzC,YAAY,IAAI,EAAE,GAAG,IAAI,QAAQ,CAAC;AAClC,gBAAgB,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAC/D;AACA,gBAAgB,MAAM,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;AACrE,SAAS;AACT,aAAa;AACb,YAAY,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAC3D,SAAS;AACT,KAAK,CAAC,CAAC;AACP,IAAI,OAAO,MAAM,CAAC;AAClB,CAAC;;ACfM,SAAS,yBAAyB,CAAC,GAAG,EAAE;AAC/C,IAAI,KAAK,MAAM,GAAG,IAAI,GAAG,EAAE;AAC3B,QAAQ,IAAI,GAAG,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;AACpC,YAAY,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;AAC5B,SAAS;AACT,KAAK;AACL,IAAI,OAAO,GAAG,CAAC;AACf,CAAC;;ACJM,SAAS,KAAK,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE;AAChD,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACnC,QAAQ,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C,QAAQ,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC;AAClF,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;AAC3C,KAAK;AACL;AACA,IAAI,OAAO,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AACrD;AACA,IAAI,yBAAyB,CAAC,OAAO,CAAC,CAAC;AACvC,IAAI,yBAAyB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/C,IAAI,MAAM,aAAa,GAAG,SAAS,CAAC,QAAQ,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;AAC7D;AACA,IAAI,IAAI,QAAQ,IAAI,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE;AACxD,QAAQ,aAAa,CAAC,SAAS,CAAC,QAAQ,GAAG,QAAQ,CAAC,SAAS,CAAC,QAAQ;AACtE,aAAa,MAAM,CAAC,CAAC,OAAO,KAAK,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACrF,
aAAa,MAAM,CAAC,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;AACtD,KAAK;AACL,IAAI,aAAa,CAAC,SAAS,CAAC,QAAQ,GAAG,aAAa,CAAC,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC,CAAC;AAC1H,IAAI,OAAO,aAAa,CAAC;AACzB,CAAC;;ACzBM,SAAS,kBAAkB,CAAC,GAAG,EAAE,UAAU,EAAE;AACpD,IAAI,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC;AACjD,IAAI,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1C,IAAI,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5B,QAAQ,OAAO,GAAG,CAAC;AACnB,KAAK;AACL,IAAI,QAAQ,GAAG;AACf,QAAQ,SAAS;AACjB,QAAQ,KAAK;AACb,aAAa,GAAG,CAAC,CAAC,IAAI,KAAK;AAC3B,YAAY,IAAI,IAAI,KAAK,GAAG,EAAE;AAC9B,gBAAgB,QAAQ,IAAI,GAAG,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAC1F,aAAa;AACb,YAAY,OAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAAE,kBAAkB,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrE,SAAS,CAAC;AACV,aAAa,IAAI,CAAC,GAAG,CAAC,EAAE;AACxB,CAAC;;AChBD,MAAM,gBAAgB,GAAG,YAAY,CAAC;AACtC,SAAS,cAAc,CAAC,YAAY,EAAE;AACtC,IAAI,OAAO,YAAY,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7D,CAAC;AACD,AAAO,SAAS,uBAAuB,CAAC,GAAG,EAAE;AAC7C,IAAI,MAAM,OAAO,GAAG,GAAG,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;AAChD,IAAI,IAAI,CAAC,OAAO,EAAE;AAClB,QAAQ,OAAO,EAAE,CAAC;AAClB,KAAK;AACL,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AACzE,CAAC;;ACVM,SAAS,IAAI,CAAC,MAAM,EAAE,UAAU,EAAE;AACzC,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;AAC9B,SAAS,MAAM,CAAC,CAAC,MAAM,KAAK,CAAC,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AACzD,SAAS,MAAM,CAAC,CAAC,GAAG,EAAE,GAAG,KAAK;AAC9B,QAAQ,GAAG,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;AAC/B,QAAQ,OAAO,GAAG,CAAC;AACnB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX,CAAC;;ACPD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS,cAAc,CAAC,GAAG,EAAE;AAC7B,IAAI,OAAO,GAAG;AACd,SAAS,KAAK,CAAC,oBAAoB,CAAC;AACpC,SAAS,GAAG,CAAC,UAAU,IAAI,EAAE;AAC7B,QAAQ,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACxC,YAAY,IAAI,GAAG,SAAS,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAC7E,SAAS;AACT,QAAQ,OAAO,IAAI,CAAC;AACpB,KAAK,CAAC;AACN,SAAS,IAAI,CAAC,EAAE,CAAC,CAAC;AAClB,CAAC;AACD,SAAS,gBAAgB,CAAC,GAAG,EAAE;AAC/B,IAAI,OAAO,kBAAkB,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,UAAU,CAAC,EAAE;AACpE,QAAQ,OAAO,GAAG,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;AAChE,KAAK,CAAC,CAAC;AACP,CAAC;AACD,SAAS,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,EAAE;AAC3C,IAAI,KAAK;AACT,QAAQ,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG;AAC5C,cAAc,cAAc,CAAC,KAAK,CAAC;AACnC,cAAc,gBAAgB,CAAC,KAAK,CAAC,CAAC;AACtC,IAAI,IAAI,GAAG,EAAE;AACb,QAAQ,OAAO,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,KAAK,CAAC;AACnD,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,KAAK,CAAC;AACrB,KAAK;AACL,CAAC;AACD,SAAS,SAAS,CAAC,KAAK,EAAE;AAC1B,IAAI,OAAO,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,CAAC;AACjD,CAAC;AACD,SAAS,aAAa,CAAC,QAAQ,EAAE;AACjC,IAAI,OAAO,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,CAAC;AACpE,CAAC;AACD,SAAS,SAAS,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,EAAE,QAAQ,EAAE;AACrD,IAAI,IAAI,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC;AAC1C,IAAI,IAAI,SAAS,CAAC,KAAK,CAAC,IAAI,KAAK,KAAK,EAAE,EAAE;AAC1C,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ;AACrC,YAAY,OAAO,KAAK,KAAK,QAAQ;AACrC,YAAY,OAAO,KAAK,KAAK,SAAS,EAAE;AACxC,YAAY,KAAK,GAAG,KAAK,CAAC,QAAQ,EAAE,CAAC;AACrC,YAAY,IAAI,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9C,gBAAgB,KAAK,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,CAAC;AACnE,aAAa;AACb,YAAY,M
AAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AAC1F,SAAS;AACT,aAAa;AACb,YAAY,IAAI,QAAQ,KAAK,GAAG,EAAE;AAClC,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AAC1C,oBAAoB,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAU,KAAK,EAAE;AACrE,wBAAwB,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,GAAG,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AACtG,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,qBAAqB;AACrB,oBAAoB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;AAC5D,wBAAwB,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,4BAA4B,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC5E,yBAAyB;AACzB,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,aAAa;AACb,iBAAiB;AACjB,gBAAgB,MAAM,GAAG,GAAG,EAAE,CAAC;AAC/B,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AAC1C,oBAAoB,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAU,KAAK,EAAE;AACrE,wBAAwB,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;AAC/D,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,qBAAqB;AACrB,oBAAoB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;AAC5D,wBAAwB,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,4BAA4B,GAAG,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,4BAA4B,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;AACjF,yBAAyB;AACzB,qBAAqB,CAAC,CAAC;AACvB,iBAAiB;AACjB,gBAAgB,IAAI,aAAa,CAAC,QAAQ,CAAC,EAAE;AAC7C,oBAAoB,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,iBAAiB;AACjB,qBAAqB,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;AAC3C,oBAAoB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/C,iBAAiB;AACjB,aAAa;AACb,SAAS;AACT,KAAK;AACL,SAAS;AACT,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9B,YAAY,IAAI,SAAS,CAAC,KAAK,CAAC,EAAE;AAClC,gBAAgB,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC;AACnD,aAAa;AACb,SAAS;AACT,aAAa,IAAI,KAAK,KAAK,EAAE,KAAK,QAAQ,KAAK,GAAG,IAAI,QAAQ,KAAK,GAAG,CAAC,EAAE;AACzE,YAAY,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC;AACrD,SAAS;AACT,aAAa,IAAI,KAAK,KAAK,EAAE,EAAE;AAC/B,YAAY,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC5B,SAAS;AACT,KAAK;AACL,IAAI,OAAO,MAAM,CAAC;AAClB,CAAC;AACD,AAAO,SAAS,QAAQ,CAAC,QAAQ,EAAE;AACnC,IAAI,OAAO;AACX,QAAQ,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AAC3C,KAAK,CAAC;AACN,CAAC;AACD,SAAS,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE;AACnC,IAAI,IAAI,SAAS,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;AACxD,IAAI,OAAO,QAAQ,CAAC,OAAO,CAAC,4BAA4B,EAAE,UAAU,CAAC,EAAE,UAAU,EAAE,OAAO,EAAE;AAC5F,QAAQ,IAAI,UAAU,EAAE;AACxB,YAAY,IAAI,QAAQ,GAAG,EAAE,CAAC;AAC9B,YAAY,MAAM,MAAM,GAAG,EAAE,CAAC;AAC9B,YAAY,IAAI,SAAS,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AAChE,gBAAgB,QAAQ,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAChD,gBAAgB,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAClD,aAAa;AACb,YAAY,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,UAAU,QAAQ,EAAE;AAC/D,gBAAgB,IAAI,GAAG,GAAG,2BAA2B,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACrE,gBAAgB,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpF,aAAa,CAAC,CAAC;AACf,YAAY,IAAI,QAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9C,gBAAgB,IAAI,SAAS,GAAG,GAAG,CAAC;AACpC,gBAAgB,IAAI,QAAQ,KAAK,GAAG,EAAE;AACtC,oBAAoB,SAAS,GAAG,GAAG,CAAC;AACpC,iBAAiB;AACjB,qBAAqB,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC3C,oBAAoB,SAAS,GAAG,QAAQ,CAAC;AACzC,iBAAiB;AACjB,gBAAgB,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,QAAQ,GAAG,EAAE,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACtF,aAAa;AACb,i
BAAiB;AACjB,gBAAgB,OAAO,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACxC,aAAa;AACb,SAAS;AACT,aAAa;AACb,YAAY,OAAO,cAAc,CAAC,OAAO,CAAC,CAAC;AAC3C,SAAS;AACT,KAAK,CAAC,CAAC;AACP,CAAC;;AC/JM,SAAS,KAAK,CAAC,OAAO,EAAE;AAC/B;AACA,IAAI,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC;AAC9C;AACA,IAAI,IAAI,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,IAAI,GAAG,EAAE,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;AACnE,IAAI,IAAI,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AACrD,IAAI,IAAI,IAAI,CAAC;AACb,IAAI,IAAI,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE;AACnC,QAAQ,QAAQ;AAChB,QAAQ,SAAS;AACjB,QAAQ,KAAK;AACb,QAAQ,SAAS;AACjB,QAAQ,SAAS;AACjB,QAAQ,WAAW;AACnB,KAAK,CAAC,CAAC;AACP;AACA,IAAI,MAAM,gBAAgB,GAAG,uBAAuB,CAAC,GAAG,CAAC,CAAC;AAC1D,IAAI,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;AAC3C,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAC5B,QAAQ,GAAG,GAAG,OAAO,CAAC,OAAO,GAAG,GAAG,CAAC;AACpC,KAAK;AACL,IAAI,MAAM,iBAAiB,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;AAClD,SAAS,MAAM,CAAC,CAAC,MAAM,KAAK,gBAAgB,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAC9D,SAAS,MAAM,CAAC,SAAS,CAAC,CAAC;AAC3B,IAAI,MAAM,mBAAmB,GAAG,IAAI,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAC;AACpE,IAAI,MAAM,eAAe,GAAG,4BAA4B,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;AAC9E,IAAI,IAAI,CAAC,eAAe,EAAE;AAC1B,QAAQ,IAAI,OAAO,CAAC,SAAS,CAAC,MAAM,EAAE;AACtC;AACA,YAAY,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM;AAC3C,iBAAiB,KAAK,CAAC,GAAG,CAAC;AAC3B,iBAAiB,GAAG,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,kDAAkD,EAAE,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AACzJ,iBAAiB,IAAI,CAAC,GAAG,CAAC,CAAC;AAC3B,SAAS;AACT,QAAQ,IAAI,OAAO,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE;AAC/C,YAAY,MAAM,wBAAwB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,CAAC,IAAI,EAAE,CAAC;AAC/F,YAAY,OAAO,CAAC,MAAM,GAAG,wBAAwB;AACrD,iBAAiB,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,QAAQ,CAAC;AACnD,iBAAiB,GAAG,CAAC,CAAC,OAAO,KAAK;AAClC,gBAAgB,MAAM,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,MAAM;AACvD,sBAAsB,CAAC,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;AACpD,sBAAsB,OAAO,CAAC;AAC9B,gBAAgB,OAAO,CAAC,uBAAuB,EAAE,OAAO,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC;AAC5E,aAAa,CAAC;AACd,iBAAiB,IAAI,CAAC,GAAG,CAAC,CAAC;AAC3B,SAAS;AACT,KAAK;AACL;AACA;AACA,IAAI,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;AAC1C,QAAQ,GAAG,GAAG,kBAAkB,CAAC,GAAG,EAAE,mBAAmB,CAAC,CAAC;AAC3D,KAAK;AACL,SAAS;AACT,QAAQ,IAAI,MAAM,IAAI,mBAAmB,EAAE;AAC3C,YAAY,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;AAC5C,SAAS;AACT,aAAa;AACb,YAAY,IAAI,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE;AACzD,gBAAgB,IAAI,GAAG,mBAAmB,CAAC;AAC3C,aAAa;AACb,iBAAiB;AACjB,gBAAgB,OAAO,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC;AAC9C,aAAa;AACb,SAAS;AACT,KAAK;AACL;AACA,IAAI,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;AACjE,QAAQ,OAAO,CAAC,cAAc,CAAC,GAAG,iCAAiC,CAAC;AACpE,KAAK;AACL;AACA;AACA,IAAI,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;AAC1E,QAAQ,IAAI,GAAG,EAAE,CAAC;AAClB,KAAK;AACL;AACA,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,EAAE,OAAO,IAAI,KAAK,WAAW,GAAG,EAAE,IAAI,EAAE,GAAG,IAAI,EAAE,OAAO,CAAC,OAAO,GAAG,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AACzJ,CAAC;;AC9EM,SAAS,oBAAoB,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE;AAC/D,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC;AAClD,CAAC;;ACDM,SAAS,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE;AACvD,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC;AACrD,IAAI,MAAM,QAAQ,GAAG,oBAAoB,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;AAC/D,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;AACnC,QAAQ,QAAQ;AAChB,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACnD,QAAQ,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;A
ACzC,QAAQ,KAAK;AACb,KAAK,CAAC,CAAC;AACP,CAAC;;ACZM,MAAM,OAAO,GAAG,mBAAmB,CAAC;;ACE3C,MAAM,SAAS,GAAG,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC,CAAC;AACrE;AACA;AACA,AAAO,MAAM,QAAQ,GAAG;AACxB,IAAI,MAAM,EAAE,KAAK;AACjB,IAAI,OAAO,EAAE,wBAAwB;AACrC,IAAI,OAAO,EAAE;AACb,QAAQ,MAAM,EAAE,gCAAgC;AAChD,QAAQ,YAAY,EAAE,SAAS;AAC/B,KAAK;AACL,IAAI,SAAS,EAAE;AACf,QAAQ,MAAM,EAAE,EAAE;AAClB,QAAQ,QAAQ,EAAE,EAAE;AACpB,KAAK;AACL,CAAC,CAAC;;ACdU,MAAC,QAAQ,GAAG,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/endpoint/package.json b/node_modules/@octokit/endpoint/package.json index 6f653ef6..4e4d4255 100644 --- a/node_modules/@octokit/endpoint/package.json +++ b/node_modules/@octokit/endpoint/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/endpoint", "description": "Turns REST API endpoints into generic request options", - "version": "6.0.11", + "version": "6.0.12", "license": "MIT", "files": [ "dist-*/", @@ -15,14 +15,7 @@ "api", "rest" ], - "homepage": "https://github.com/octokit/endpoint.js#readme", - "bugs": { - "url": "https://github.com/octokit/endpoint.js/issues" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/octokit/endpoint.js.git" - }, + "repository": "github:octokit/endpoint.js", "dependencies": { "@octokit/types": "^6.0.3", "is-plain-object": "^5.0.0", @@ -34,11 +27,11 @@ "@pika/plugin-build-web": "^0.9.0", "@pika/plugin-ts-standard-pkg": "^0.9.0", "@types/jest": "^26.0.0", - "jest": "^26.0.1", - "prettier": "2.2.1", + "jest": "^27.0.0", + "prettier": "2.3.1", "semantic-release": "^17.0.0", "semantic-release-plugin-update-version-in-files": "^1.0.0", - "ts-jest": "^26.0.0", + "ts-jest": "^27.0.0-next.12", "typescript": "^4.0.2" }, "publishConfig": { diff --git a/node_modules/@octokit/graphql/README.md b/node_modules/@octokit/graphql/README.md index 8dfc626b..7ba46fbd 100644 --- a/node_modules/@octokit/graphql/README.md +++ b/node_modules/@octokit/graphql/README.md @@ -258,7 +258,7 @@ await myGraphql(` Additionally, `GraphQlQueryResponseData` has been exposed to users: ```ts -import type { GraphQlQueryResponseData } from "octokit/graphql"; +import type { GraphQlQueryResponseData } from "@octokit/graphql"; ``` ## Errors diff --git a/node_modules/@octokit/graphql/dist-node/index.js b/node_modules/@octokit/graphql/dist-node/index.js index 24987b87..3706e2b2 100644 --- a/node_modules/@octokit/graphql/dist-node/index.js +++ b/node_modules/@octokit/graphql/dist-node/index.js @@ -5,7 +5,7 @@ Object.defineProperty(exports, '__esModule', { value: true }); var request = require('@octokit/request'); var universalUserAgent = require('universal-user-agent'); -const VERSION = "4.6.1"; +const VERSION = "4.6.4"; class GraphqlError extends Error { constructor(request, response) { diff --git a/node_modules/@octokit/graphql/dist-node/index.js.map b/node_modules/@octokit/graphql/dist-node/index.js.map index 7f424faa..05133848 100644 --- a/node_modules/@octokit/graphql/dist-node/index.js.map +++ b/node_modules/@octokit/graphql/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/error.js","../dist-src/graphql.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"4.6.1\";\n","export class GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, { headers: response.headers });\n this.name = 
\"GraphqlError\";\n this.request = request;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n","import { GraphqlError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\",\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nexport function graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data,\n });\n }\n return response.data.data;\n });\n}\n","import { request as Request } from \"@octokit/request\";\nimport { graphql } from \"./graphql\";\nexport function withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: Request.endpoint,\n });\n}\n","import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nexport const graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`,\n },\n method: \"POST\",\n url: \"/graphql\",\n});\nexport function withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\",\n 
});\n}\n"],"names":["VERSION","GraphqlError","Error","constructor","request","response","message","data","errors","Object","assign","headers","name","captureStackTrace","NON_VARIABLE_OPTIONS","FORBIDDEN_VARIABLE_OPTIONS","GHES_V3_SUFFIX_REGEX","graphql","query","options","Promise","reject","key","includes","parsedOptions","requestOptions","keys","reduce","result","variables","baseUrl","endpoint","DEFAULTS","test","url","replace","then","withDefaults","newDefaults","newRequest","defaults","newApi","bind","Request","getUserAgent","method","withCustomRequest","customRequest"],"mappings":";;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACAA,MAAMC,YAAN,SAA2BC,KAA3B,CAAiC;AACpCC,EAAAA,WAAW,CAACC,OAAD,EAAUC,QAAV,EAAoB;AAC3B,UAAMC,OAAO,GAAGD,QAAQ,CAACE,IAAT,CAAcC,MAAd,CAAqB,CAArB,EAAwBF,OAAxC;AACA,UAAMA,OAAN;AACAG,IAAAA,MAAM,CAACC,MAAP,CAAc,IAAd,EAAoBL,QAAQ,CAACE,IAA7B;AACAE,IAAAA,MAAM,CAACC,MAAP,CAAc,IAAd,EAAoB;AAAEC,MAAAA,OAAO,EAAEN,QAAQ,CAACM;AAApB,KAApB;AACA,SAAKC,IAAL,GAAY,cAAZ;AACA,SAAKR,OAAL,GAAeA,OAAf,CAN2B;;AAQ3B;;AACA,QAAIF,KAAK,CAACW,iBAAV,EAA6B;AACzBX,MAAAA,KAAK,CAACW,iBAAN,CAAwB,IAAxB,EAA8B,KAAKV,WAAnC;AACH;AACJ;;AAbmC;;ACCxC,MAAMW,oBAAoB,GAAG,CACzB,QADyB,EAEzB,SAFyB,EAGzB,KAHyB,EAIzB,SAJyB,EAKzB,SALyB,EAMzB,OANyB,EAOzB,WAPyB,CAA7B;AASA,MAAMC,0BAA0B,GAAG,CAAC,OAAD,EAAU,QAAV,EAAoB,KAApB,CAAnC;AACA,MAAMC,oBAAoB,GAAG,eAA7B;AACA,AAAO,SAASC,OAAT,CAAiBb,OAAjB,EAA0Bc,KAA1B,EAAiCC,OAAjC,EAA0C;AAC7C,MAAIA,OAAJ,EAAa;AACT,QAAI,OAAOD,KAAP,KAAiB,QAAjB,IAA6B,WAAWC,OAA5C,EAAqD;AACjD,aAAOC,OAAO,CAACC,MAAR,CAAe,IAAInB,KAAJ,CAAW,4DAAX,CAAf,CAAP;AACH;;AACD,SAAK,MAAMoB,GAAX,IAAkBH,OAAlB,EAA2B;AACvB,UAAI,CAACJ,0BAA0B,CAACQ,QAA3B,CAAoCD,GAApC,CAAL,EACI;AACJ,aAAOF,OAAO,CAACC,MAAR,CAAe,IAAInB,KAAJ,CAAW,uBAAsBoB,GAAI,mCAArC,CAAf,CAAP;AACH;AACJ;;AACD,QAAME,aAAa,GAAG,OAAON,KAAP,KAAiB,QAAjB,GAA4BT,MAAM,CAACC,MAAP,CAAc;AAAEQ,IAAAA;AAAF,GAAd,EAAyBC,OAAzB,CAA5B,GAAgED,KAAtF;AACA,QAAMO,cAAc,GAAGhB,MAAM,CAACiB,IAAP,CAAYF,aAAZ,EAA2BG,MAA3B,CAAkC,CAACC,MAAD,EAASN,GAAT,KAAiB;AACtE,QAAIR,oBAAoB,CAACS,QAArB,CAA8BD,GAA9B,CAAJ,EAAwC;AACpCM,MAAAA,MAAM,CAACN,GAAD,CAAN,GAAcE,aAAa,CAACF,GAAD,CAA3B;AACA,aAAOM,MAAP;AACH;;AACD,QAAI,CAACA,MAAM,CAACC,SAAZ,EAAuB;AACnBD,MAAAA,MAAM,CAACC,SAAP,GAAmB,EAAnB;AACH;;AACDD,IAAAA,MAAM,CAACC,SAAP,CAAiBP,GAAjB,IAAwBE,aAAa,CAACF,GAAD,CAArC;AACA,WAAOM,MAAP;AACH,GAVsB,EAUpB,EAVoB,CAAvB,CAZ6C;AAwB7C;;AACA,QAAME,OAAO,GAAGN,aAAa,CAACM,OAAd,IAAyB1B,OAAO,CAAC2B,QAAR,CAAiBC,QAAjB,CAA0BF,OAAnE;;AACA,MAAId,oBAAoB,CAACiB,IAArB,CAA0BH,OAA1B,CAAJ,EAAwC;AACpCL,IAAAA,cAAc,CAACS,GAAf,GAAqBJ,OAAO,CAACK,OAAR,CAAgBnB,oBAAhB,EAAsC,cAAtC,CAArB;AACH;;AACD,SAAOZ,OAAO,CAACqB,cAAD,CAAP,CAAwBW,IAAxB,CAA8B/B,QAAD,IAAc;AAC9C,QAAIA,QAAQ,CAACE,IAAT,CAAcC,MAAlB,EAA0B;AACtB,YAAMG,OAAO,GAAG,EAAhB;;AACA,WAAK,MAAMW,GAAX,IAAkBb,MAAM,CAACiB,IAAP,CAAYrB,QAAQ,CAACM,OAArB,CAAlB,EAAiD;AAC7CA,QAAAA,OAAO,CAACW,GAAD,CAAP,GAAejB,QAAQ,CAACM,OAAT,CAAiBW,GAAjB,CAAf;AACH;;AACD,YAAM,IAAIrB,YAAJ,CAAiBwB,cAAjB,EAAiC;AACnCd,QAAAA,OADmC;AAEnCJ,QAAAA,IAAI,EAAEF,QAAQ,CAACE;AAFoB,OAAjC,CAAN;AAIH;;AACD,WAAOF,QAAQ,CAACE,IAAT,CAAcA,IAArB;AACH,GAZM,CAAP;AAaH;;ACpDM,SAAS8B,YAAT,CAAsBjC,SAAtB,EAA+BkC,WAA/B,EAA4C;AAC/C,QAAMC,UAAU,GAAGnC,SAAO,CAACoC,QAAR,CAAiBF,WAAjB,CAAnB;;AACA,QAAMG,MAAM,GAAG,CAACvB,KAAD,EAAQC,OAAR,KAAoB;AAC/B,WAAOF,OAAO,CAACsB,UAAD,EAAarB,KAAb,EAAoBC,OAApB,CAAd;AACH,GAFD;;AAGA,SAAOV,MAAM,CAACC,MAAP,CAAc+B,MAAd,EAAsB;AACzBD,IAAAA,QAAQ,EAAEH,YAAY,CAACK,IAAb,CAAkB,IAAlB,EAAwBH,UAAxB,CADe;AAEzBR,IAAAA,QAAQ,EAAEY,eAAO,CAACZ;AAFO,GAAtB,CAAP;AAIH;;MCPYd,SAAO,GAAGoB,YAAY,CAACjC,eAAD,EAAU;AACzCO,EAAAA,OAAO,EAAE;AACL,kBAAe,sBAAqBX,OAAQ,IAAG4C,+BAAY,EAAG;AADzD,GAD
gC;AAIzCC,EAAAA,MAAM,EAAE,MAJiC;AAKzCX,EAAAA,GAAG,EAAE;AALoC,CAAV,CAA5B;AAOP,AAAO,SAASY,iBAAT,CAA2BC,aAA3B,EAA0C;AAC7C,SAAOV,YAAY,CAACU,aAAD,EAAgB;AAC/BF,IAAAA,MAAM,EAAE,MADuB;AAE/BX,IAAAA,GAAG,EAAE;AAF0B,GAAhB,CAAnB;AAIH;;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/error.js","../dist-src/graphql.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"4.6.4\";\n","export class GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, { headers: response.headers });\n this.name = \"GraphqlError\";\n this.request = request;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n","import { GraphqlError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\",\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nexport function graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data,\n });\n }\n return response.data.data;\n });\n}\n","import { request as Request } from \"@octokit/request\";\nimport { graphql } from \"./graphql\";\nexport function withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: Request.endpoint,\n });\n}\n","import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nexport const graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`,\n },\n method: 
\"POST\",\n url: \"/graphql\",\n});\nexport function withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\",\n });\n}\n"],"names":["VERSION","GraphqlError","Error","constructor","request","response","message","data","errors","Object","assign","headers","name","captureStackTrace","NON_VARIABLE_OPTIONS","FORBIDDEN_VARIABLE_OPTIONS","GHES_V3_SUFFIX_REGEX","graphql","query","options","Promise","reject","key","includes","parsedOptions","requestOptions","keys","reduce","result","variables","baseUrl","endpoint","DEFAULTS","test","url","replace","then","withDefaults","newDefaults","newRequest","defaults","newApi","bind","Request","getUserAgent","method","withCustomRequest","customRequest"],"mappings":";;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACAA,MAAMC,YAAN,SAA2BC,KAA3B,CAAiC;AACpCC,EAAAA,WAAW,CAACC,OAAD,EAAUC,QAAV,EAAoB;AAC3B,UAAMC,OAAO,GAAGD,QAAQ,CAACE,IAAT,CAAcC,MAAd,CAAqB,CAArB,EAAwBF,OAAxC;AACA,UAAMA,OAAN;AACAG,IAAAA,MAAM,CAACC,MAAP,CAAc,IAAd,EAAoBL,QAAQ,CAACE,IAA7B;AACAE,IAAAA,MAAM,CAACC,MAAP,CAAc,IAAd,EAAoB;AAAEC,MAAAA,OAAO,EAAEN,QAAQ,CAACM;AAApB,KAApB;AACA,SAAKC,IAAL,GAAY,cAAZ;AACA,SAAKR,OAAL,GAAeA,OAAf,CAN2B;;AAQ3B;;AACA,QAAIF,KAAK,CAACW,iBAAV,EAA6B;AACzBX,MAAAA,KAAK,CAACW,iBAAN,CAAwB,IAAxB,EAA8B,KAAKV,WAAnC;AACH;AACJ;;AAbmC;;ACCxC,MAAMW,oBAAoB,GAAG,CACzB,QADyB,EAEzB,SAFyB,EAGzB,KAHyB,EAIzB,SAJyB,EAKzB,SALyB,EAMzB,OANyB,EAOzB,WAPyB,CAA7B;AASA,MAAMC,0BAA0B,GAAG,CAAC,OAAD,EAAU,QAAV,EAAoB,KAApB,CAAnC;AACA,MAAMC,oBAAoB,GAAG,eAA7B;AACA,AAAO,SAASC,OAAT,CAAiBb,OAAjB,EAA0Bc,KAA1B,EAAiCC,OAAjC,EAA0C;AAC7C,MAAIA,OAAJ,EAAa;AACT,QAAI,OAAOD,KAAP,KAAiB,QAAjB,IAA6B,WAAWC,OAA5C,EAAqD;AACjD,aAAOC,OAAO,CAACC,MAAR,CAAe,IAAInB,KAAJ,CAAW,4DAAX,CAAf,CAAP;AACH;;AACD,SAAK,MAAMoB,GAAX,IAAkBH,OAAlB,EAA2B;AACvB,UAAI,CAACJ,0BAA0B,CAACQ,QAA3B,CAAoCD,GAApC,CAAL,EACI;AACJ,aAAOF,OAAO,CAACC,MAAR,CAAe,IAAInB,KAAJ,CAAW,uBAAsBoB,GAAI,mCAArC,CAAf,CAAP;AACH;AACJ;;AACD,QAAME,aAAa,GAAG,OAAON,KAAP,KAAiB,QAAjB,GAA4BT,MAAM,CAACC,MAAP,CAAc;AAAEQ,IAAAA;AAAF,GAAd,EAAyBC,OAAzB,CAA5B,GAAgED,KAAtF;AACA,QAAMO,cAAc,GAAGhB,MAAM,CAACiB,IAAP,CAAYF,aAAZ,EAA2BG,MAA3B,CAAkC,CAACC,MAAD,EAASN,GAAT,KAAiB;AACtE,QAAIR,oBAAoB,CAACS,QAArB,CAA8BD,GAA9B,CAAJ,EAAwC;AACpCM,MAAAA,MAAM,CAACN,GAAD,CAAN,GAAcE,aAAa,CAACF,GAAD,CAA3B;AACA,aAAOM,MAAP;AACH;;AACD,QAAI,CAACA,MAAM,CAACC,SAAZ,EAAuB;AACnBD,MAAAA,MAAM,CAACC,SAAP,GAAmB,EAAnB;AACH;;AACDD,IAAAA,MAAM,CAACC,SAAP,CAAiBP,GAAjB,IAAwBE,aAAa,CAACF,GAAD,CAArC;AACA,WAAOM,MAAP;AACH,GAVsB,EAUpB,EAVoB,CAAvB,CAZ6C;AAwB7C;;AACA,QAAME,OAAO,GAAGN,aAAa,CAACM,OAAd,IAAyB1B,OAAO,CAAC2B,QAAR,CAAiBC,QAAjB,CAA0BF,OAAnE;;AACA,MAAId,oBAAoB,CAACiB,IAArB,CAA0BH,OAA1B,CAAJ,EAAwC;AACpCL,IAAAA,cAAc,CAACS,GAAf,GAAqBJ,OAAO,CAACK,OAAR,CAAgBnB,oBAAhB,EAAsC,cAAtC,CAArB;AACH;;AACD,SAAOZ,OAAO,CAACqB,cAAD,CAAP,CAAwBW,IAAxB,CAA8B/B,QAAD,IAAc;AAC9C,QAAIA,QAAQ,CAACE,IAAT,CAAcC,MAAlB,EAA0B;AACtB,YAAMG,OAAO,GAAG,EAAhB;;AACA,WAAK,MAAMW,GAAX,IAAkBb,MAAM,CAACiB,IAAP,CAAYrB,QAAQ,CAACM,OAArB,CAAlB,EAAiD;AAC7CA,QAAAA,OAAO,CAACW,GAAD,CAAP,GAAejB,QAAQ,CAACM,OAAT,CAAiBW,GAAjB,CAAf;AACH;;AACD,YAAM,IAAIrB,YAAJ,CAAiBwB,cAAjB,EAAiC;AACnCd,QAAAA,OADmC;AAEnCJ,QAAAA,IAAI,EAAEF,QAAQ,CAACE;AAFoB,OAAjC,CAAN;AAIH;;AACD,WAAOF,QAAQ,CAACE,IAAT,CAAcA,IAArB;AACH,GAZM,CAAP;AAaH;;ACpDM,SAAS8B,YAAT,CAAsBjC,SAAtB,EAA+BkC,WAA/B,EAA4C;AAC/C,QAAMC,UAAU,GAAGnC,SAAO,CAACoC,QAAR,CAAiBF,WAAjB,CAAnB;;AACA,QAAMG,MAAM,GAAG,CAACvB,KAAD,EAAQC,OAAR,KAAoB;AAC/B,WAAOF,OAAO,CAACsB,UAAD,EAAarB,KAAb,EAAoBC,OAApB,CAAd;AACH,GAFD;;AAGA,SAAOV,MAAM,CAACC,MAAP,CAAc+B,MAAd,EAAsB;AACzBD,IAAAA,QAAQ,EAAEH,YAAY,CAACK,IAAb,CAAkB,IAAlB,EAAwBH,UAAxB,CADe;A
AEzBR,IAAAA,QAAQ,EAAEY,eAAO,CAACZ;AAFO,GAAtB,CAAP;AAIH;;MCPYd,SAAO,GAAGoB,YAAY,CAACjC,eAAD,EAAU;AACzCO,EAAAA,OAAO,EAAE;AACL,kBAAe,sBAAqBX,OAAQ,IAAG4C,+BAAY,EAAG;AADzD,GADgC;AAIzCC,EAAAA,MAAM,EAAE,MAJiC;AAKzCX,EAAAA,GAAG,EAAE;AALoC,CAAV,CAA5B;AAOP,AAAO,SAASY,iBAAT,CAA2BC,aAA3B,EAA0C;AAC7C,SAAOV,YAAY,CAACU,aAAD,EAAgB;AAC/BF,IAAAA,MAAM,EAAE,MADuB;AAE/BX,IAAAA,GAAG,EAAE;AAF0B,GAAhB,CAAnB;AAIH;;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/graphql/dist-src/version.js b/node_modules/@octokit/graphql/dist-src/version.js index 4bc1a411..202b8c64 100644 --- a/node_modules/@octokit/graphql/dist-src/version.js +++ b/node_modules/@octokit/graphql/dist-src/version.js @@ -1 +1 @@ -export const VERSION = "4.6.1"; +export const VERSION = "4.6.4"; diff --git a/node_modules/@octokit/graphql/dist-types/version.d.ts b/node_modules/@octokit/graphql/dist-types/version.d.ts index f92f04e7..c1c71f79 100644 --- a/node_modules/@octokit/graphql/dist-types/version.d.ts +++ b/node_modules/@octokit/graphql/dist-types/version.d.ts @@ -1 +1 @@ -export declare const VERSION = "4.6.1"; +export declare const VERSION = "4.6.4"; diff --git a/node_modules/@octokit/graphql/dist-web/index.js b/node_modules/@octokit/graphql/dist-web/index.js index b544d554..79868ba4 100644 --- a/node_modules/@octokit/graphql/dist-web/index.js +++ b/node_modules/@octokit/graphql/dist-web/index.js @@ -1,7 +1,7 @@ import { request } from '@octokit/request'; import { getUserAgent } from 'universal-user-agent'; -const VERSION = "4.6.1"; +const VERSION = "4.6.4"; class GraphqlError extends Error { constructor(request, response) { diff --git a/node_modules/@octokit/graphql/dist-web/index.js.map b/node_modules/@octokit/graphql/dist-web/index.js.map index eb04e1ef..8fcf95ad 100644 --- a/node_modules/@octokit/graphql/dist-web/index.js.map +++ b/node_modules/@octokit/graphql/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/error.js","../dist-src/graphql.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"4.6.1\";\n","export class GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, { headers: response.headers });\n this.name = \"GraphqlError\";\n this.request = request;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n","import { GraphqlError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\",\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nexport function graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data,\n });\n }\n return response.data.data;\n });\n}\n","import { request as Request } from \"@octokit/request\";\nimport { graphql } from \"./graphql\";\nexport function withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: Request.endpoint,\n });\n}\n","import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nexport const graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`,\n },\n method: \"POST\",\n url: \"/graphql\",\n});\nexport function withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\",\n 
});\n}\n"],"names":["request","Request","graphql"],"mappings":";;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACAnC,MAAM,YAAY,SAAS,KAAK,CAAC;AACxC,IAAI,WAAW,CAAC,OAAO,EAAE,QAAQ,EAAE;AACnC,QAAQ,MAAM,OAAO,GAAG,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AACxD,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC;AACvB,QAAQ,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC;AAC3C,QAAQ,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,EAAE,OAAO,EAAE,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;AAC3D,QAAQ,IAAI,CAAC,IAAI,GAAG,cAAc,CAAC;AACnC,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AAC/B;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,KAAK;AACL,CAAC;;ACbD,MAAM,oBAAoB,GAAG;AAC7B,IAAI,QAAQ;AACZ,IAAI,SAAS;AACb,IAAI,KAAK;AACT,IAAI,SAAS;AACb,IAAI,SAAS;AACb,IAAI,OAAO;AACX,IAAI,WAAW;AACf,CAAC,CAAC;AACF,MAAM,0BAA0B,GAAG,CAAC,OAAO,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;AAC9D,MAAM,oBAAoB,GAAG,eAAe,CAAC;AAC7C,AAAO,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE;AACjD,IAAI,IAAI,OAAO,EAAE;AACjB,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,IAAI,OAAO,EAAE;AAC7D,YAAY,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAC,0DAA0D,CAAC,CAAC,CAAC,CAAC;AAC3G,SAAS;AACT,QAAQ,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACnC,YAAY,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,GAAG,CAAC;AACzD,gBAAgB,SAAS;AACzB,YAAY,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAC,oBAAoB,EAAE,GAAG,CAAC,iCAAiC,CAAC,CAAC,CAAC,CAAC;AAC5G,SAAS;AACT,KAAK;AACL,IAAI,MAAM,aAAa,GAAG,OAAO,KAAK,KAAK,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,EAAE,OAAO,CAAC,GAAG,KAAK,CAAC;AAChG,IAAI,MAAM,cAAc,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK;AAC9E,QAAQ,IAAI,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AAChD,YAAY,MAAM,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AAC7C,YAAY,OAAO,MAAM,CAAC;AAC1B,SAAS;AACT,QAAQ,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;AAC/B,YAAY,MAAM,CAAC,SAAS,GAAG,EAAE,CAAC;AAClC,SAAS;AACT,QAAQ,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AACnD,QAAQ,OAAO,MAAM,CAAC;AACtB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX;AACA;AACA,IAAI,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,IAAI,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC;AAC/E,IAAI,IAAI,oBAAoB,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;AAC5C,QAAQ,cAAc,CAAC,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC,oBAAoB,EAAE,cAAc,CAAC,CAAC;AACnF,KAAK;AACL,IAAI,OAAO,OAAO,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,KAAK;AACtD,QAAQ,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,YAAY,MAAM,OAAO,GAAG,EAAE,CAAC;AAC/B,YAAY,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;AAC7D,gBAAgB,OAAO,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACrD,aAAa;AACb,YAAY,MAAM,IAAI,YAAY,CAAC,cAAc,EAAE;AACnD,gBAAgB,OAAO;AACvB,gBAAgB,IAAI,EAAE,QAAQ,CAAC,IAAI;AACnC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,OAAO,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;AAClC,KAAK,CAAC,CAAC;AACP,CAAC;;ACpDM,SAAS,YAAY,CAACA,SAAO,EAAE,WAAW,EAAE;AACnD,IAAI,MAAM,UAAU,GAAGA,SAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;AACrD,IAAI,MAAM,MAAM,GAAG,CAAC,KAAK,EAAE,OAAO,KAAK;AACvC,QAAQ,OAAO,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AACnD,KAAK,CAAC;AACN,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE;AACjC,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,UAAU,CAAC;AACrD,QAAQ,QAAQ,EAAEC,OAAO,CAAC,QAAQ;AAClC,KAAK,CAAC,CAAC;AACP,CAAC;;ACPW,MAACC,SAAO,GAAG,YAAY,CAAC,OAAO,EAAE;AAC7C,IAAI,OAAO,EAAE;AACb,QAAQ,YAAY,EAAE,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AACvE,KAAK;AACL,IAAI,MAAM,EAAE,MAAM;AAClB,IAAI,GAAG,EAAE,UAAU;AACnB,CAAC,CAAC,CAAC;AACH,AAAO,SAAS,iBAAiB,CAAC,aAAa,EAAE;AACjD,IAAI,OAAO,YAAY,CAAC,aAAa,EAAE;AACvC,QAAQ,MAAM,EAAE,MAAM;AACtB,QAAQ,GAAG,EAAE,UAAU;AACvB,KAAK,CAAC,CAAC;AACP,CAAC;;;;"} \ No newline at end of file 
+{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/error.js","../dist-src/graphql.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"4.6.4\";\n","export class GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, { headers: response.headers });\n this.name = \"GraphqlError\";\n this.request = request;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n","import { GraphqlError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\",\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nexport function graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data,\n });\n }\n return response.data.data;\n });\n}\n","import { request as Request } from \"@octokit/request\";\nimport { graphql } from \"./graphql\";\nexport function withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: Request.endpoint,\n });\n}\n","import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nexport const graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`,\n },\n method: \"POST\",\n url: \"/graphql\",\n});\nexport function withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\",\n 
});\n}\n"],"names":["request","Request","graphql"],"mappings":";;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACAnC,MAAM,YAAY,SAAS,KAAK,CAAC;AACxC,IAAI,WAAW,CAAC,OAAO,EAAE,QAAQ,EAAE;AACnC,QAAQ,MAAM,OAAO,GAAG,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AACxD,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC;AACvB,QAAQ,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC;AAC3C,QAAQ,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,EAAE,OAAO,EAAE,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;AAC3D,QAAQ,IAAI,CAAC,IAAI,GAAG,cAAc,CAAC;AACnC,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AAC/B;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,KAAK;AACL,CAAC;;ACbD,MAAM,oBAAoB,GAAG;AAC7B,IAAI,QAAQ;AACZ,IAAI,SAAS;AACb,IAAI,KAAK;AACT,IAAI,SAAS;AACb,IAAI,SAAS;AACb,IAAI,OAAO;AACX,IAAI,WAAW;AACf,CAAC,CAAC;AACF,MAAM,0BAA0B,GAAG,CAAC,OAAO,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;AAC9D,MAAM,oBAAoB,GAAG,eAAe,CAAC;AAC7C,AAAO,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE;AACjD,IAAI,IAAI,OAAO,EAAE;AACjB,QAAQ,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,IAAI,OAAO,EAAE;AAC7D,YAAY,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAC,0DAA0D,CAAC,CAAC,CAAC,CAAC;AAC3G,SAAS;AACT,QAAQ,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACnC,YAAY,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,GAAG,CAAC;AACzD,gBAAgB,SAAS;AACzB,YAAY,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAC,oBAAoB,EAAE,GAAG,CAAC,iCAAiC,CAAC,CAAC,CAAC,CAAC;AAC5G,SAAS;AACT,KAAK;AACL,IAAI,MAAM,aAAa,GAAG,OAAO,KAAK,KAAK,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,EAAE,OAAO,CAAC,GAAG,KAAK,CAAC;AAChG,IAAI,MAAM,cAAc,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,GAAG,KAAK;AAC9E,QAAQ,IAAI,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AAChD,YAAY,MAAM,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AAC7C,YAAY,OAAO,MAAM,CAAC;AAC1B,SAAS;AACT,QAAQ,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;AAC/B,YAAY,MAAM,CAAC,SAAS,GAAG,EAAE,CAAC;AAClC,SAAS;AACT,QAAQ,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AACnD,QAAQ,OAAO,MAAM,CAAC;AACtB,KAAK,EAAE,EAAE,CAAC,CAAC;AACX;AACA;AACA,IAAI,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,IAAI,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC;AAC/E,IAAI,IAAI,oBAAoB,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;AAC5C,QAAQ,cAAc,CAAC,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC,oBAAoB,EAAE,cAAc,CAAC,CAAC;AACnF,KAAK;AACL,IAAI,OAAO,OAAO,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,KAAK;AACtD,QAAQ,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,YAAY,MAAM,OAAO,GAAG,EAAE,CAAC;AAC/B,YAAY,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;AAC7D,gBAAgB,OAAO,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACrD,aAAa;AACb,YAAY,MAAM,IAAI,YAAY,CAAC,cAAc,EAAE;AACnD,gBAAgB,OAAO;AACvB,gBAAgB,IAAI,EAAE,QAAQ,CAAC,IAAI;AACnC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,OAAO,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC;AAClC,KAAK,CAAC,CAAC;AACP,CAAC;;ACpDM,SAAS,YAAY,CAACA,SAAO,EAAE,WAAW,EAAE;AACnD,IAAI,MAAM,UAAU,GAAGA,SAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;AACrD,IAAI,MAAM,MAAM,GAAG,CAAC,KAAK,EAAE,OAAO,KAAK;AACvC,QAAQ,OAAO,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AACnD,KAAK,CAAC;AACN,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE;AACjC,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,UAAU,CAAC;AACrD,QAAQ,QAAQ,EAAEC,OAAO,CAAC,QAAQ;AAClC,KAAK,CAAC,CAAC;AACP,CAAC;;ACPW,MAACC,SAAO,GAAG,YAAY,CAAC,OAAO,EAAE;AAC7C,IAAI,OAAO,EAAE;AACb,QAAQ,YAAY,EAAE,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AACvE,KAAK;AACL,IAAI,MAAM,EAAE,MAAM;AAClB,IAAI,GAAG,EAAE,UAAU;AACnB,CAAC,CAAC,CAAC;AACH,AAAO,SAAS,iBAAiB,CAAC,aAAa,EAAE;AACjD,IAAI,OAAO,YAAY,CAAC,aAAa,EAAE;AACvC,QAAQ,MAAM,EAAE,MAAM;AACtB,QAAQ,GAAG,EAAE,UAAU;AACvB,KAAK,CAAC,CAAC;AACP,CAAC;;;;"} \ No newline at end of file diff --git 
a/node_modules/@octokit/graphql/package.json b/node_modules/@octokit/graphql/package.json index 4b803356..a26fad72 100644 --- a/node_modules/@octokit/graphql/package.json +++ b/node_modules/@octokit/graphql/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/graphql", "description": "GitHub GraphQL API client for browsers and Node", - "version": "4.6.1", + "version": "4.6.4", "license": "MIT", "files": [ "dist-*/", @@ -17,7 +17,7 @@ ], "repository": "github:octokit/graphql.js", "dependencies": { - "@octokit/request": "^5.3.0", + "@octokit/request": "^5.6.0", "@octokit/types": "^6.0.3", "universal-user-agent": "^6.0.0" }, @@ -30,11 +30,11 @@ "@types/jest": "^26.0.0", "@types/node": "^14.0.4", "fetch-mock": "^9.0.0", - "jest": "^26.0.0", - "prettier": "^2.0.0", + "jest": "^27.0.0", + "prettier": "2.3.1", "semantic-release": "^17.0.0", "semantic-release-plugin-update-version-in-files": "^1.0.0", - "ts-jest": "^26.0.0", + "ts-jest": "^27.0.0-next.12", "typescript": "^4.0.0" }, "publishConfig": { diff --git a/node_modules/@octokit/openapi-types/dist-node/index.js b/node_modules/@octokit/openapi-types/dist-node/index.js index f39d5a52..60462582 100644 --- a/node_modules/@octokit/openapi-types/dist-node/index.js +++ b/node_modules/@octokit/openapi-types/dist-node/index.js @@ -2,7 +2,7 @@ Object.defineProperty(exports, '__esModule', { value: true }); -const VERSION = "7.0.0"; +const VERSION = "7.3.2"; exports.VERSION = VERSION; //# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/openapi-types/dist-node/index.js.map b/node_modules/@octokit/openapi-types/dist-node/index.js.map index a33df81d..0e1313e1 100644 --- a/node_modules/@octokit/openapi-types/dist-node/index.js.map +++ b/node_modules/@octokit/openapi-types/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js"],"sourcesContent":["export const VERSION = \"7.0.0\";\n"],"names":["VERSION"],"mappings":";;;;MAAaA,OAAO,GAAG;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js"],"sourcesContent":["export const VERSION = \"7.3.2\";\n"],"names":["VERSION"],"mappings":";;;;MAAaA,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/openapi-types/dist-src/version.js b/node_modules/@octokit/openapi-types/dist-src/version.js index f239fb3c..2084526d 100644 --- a/node_modules/@octokit/openapi-types/dist-src/version.js +++ b/node_modules/@octokit/openapi-types/dist-src/version.js @@ -1 +1 @@ -export const VERSION = "7.0.0"; +export const VERSION = "7.3.2"; diff --git a/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts b/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts index 2fd0d76e..bf14b803 100644 --- a/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts +++ b/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts @@ -216,16 +216,6 @@ export interface paths { "/codes_of_conduct/{key}": { get: operations["codes-of-conduct/get-conduct-code"]; }; - "/content_references/{content_reference_id}/attachments": { - /** - * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. - * - * The app must create a content attachment within six hours of the content reference URL being posted. 
See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - post: operations["apps/create-content-attachment"]; - }; "/emojis": { /** Lists all the emojis available to use on GitHub. */ get: operations["emojis/get"]; @@ -1718,11 +1708,6 @@ export interface paths { }; "/projects/columns/{column_id}/cards": { get: operations["projects/list-cards"]; - /** - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by the `pull_request` key. - * - * Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ post: operations["projects/create-card"]; }; "/projects/columns/{column_id}/moves": { @@ -1934,6 +1919,16 @@ export interface paths { /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ get: operations["actions/get-reviews-for-run"]; }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/approve": { + /** + * **Note:** This endpoint is currently in beta and is subject to change. + * + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + post: operations["actions/approve-workflow-run"]; + }; "/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": { /** Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ get: operations["actions/list-workflow-run-artifacts"]; @@ -2604,8 +2599,8 @@ export interface paths { * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. * * There are two places where you can upload code scanning results. - * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/github/finding-security-vulnerabilities-and-errors-in-your-code/triaging-code-scanning-alerts-in-pull-requests)." - * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. 
For more information, see "[Managing code scanning alerts for your repository](/github/finding-security-vulnerabilities-and-errors-in-your-code/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." * * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: * @@ -2846,6 +2841,61 @@ export interface paths { */ get: operations["repos/get-community-profile-metrics"]; }; + "/repos/{owner}/{repo}/compare/{basehead}": { + /** + * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. 
| + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/compare-commits-with-basehead"]; + }; + "/repos/{owner}/{repo}/content_references/{content_reference_id}/attachments": { + /** + * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` and `repository` `full_name` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. + * + * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + post: operations["apps/create-content-attachment-for-repo"]; + }; "/repos/{owner}/{repo}/contents/{path}": { /** * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit @@ -3791,6 +3841,10 @@ export interface paths { */ post: operations["repos/upload-release-asset"]; }; + "/repos/{owner}/{repo}/releases/{release_id}/reactions": { + /** Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. */ + post: operations["reactions/create-for-release"]; + }; "/repos/{owner}/{repo}/secret-scanning/alerts": { /** * Lists all secret scanning alerts for a private repository, from newest to oldest. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. @@ -3952,8 +4006,8 @@ export interface paths { /** * Lists all public repositories in the order that they were created. 
* - * Notes: - * - For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. */ get: operations["repos/list-public"]; @@ -5106,7 +5160,7 @@ export interface paths { }; "/repos/{owner}/{repo}/compare/{base}...{head}": { /** - * Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. * * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. * @@ -5149,6 +5203,16 @@ export interface paths { */ get: operations["repos/compare-commits"]; }; + "/content_references/{content_reference_id}/attachments": { + /** + * **Deprecated:** use `apps.createContentAttachmentForRepo()` (`POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments`) instead. Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. + * + * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + post: operations["apps/create-content-attachment"]; + }; } export interface components { schemas: { @@ -5662,17 +5726,6 @@ export interface components { body?: string; html_url: string | null; }; - /** Content Reference attachments allow you to provide context around URLs posted in comments */ - "content-reference-attachment": { - /** The ID of the attachment */ - id: number; - /** The title of the attachment */ - title: string; - /** The body of the attachment */ - body: string; - /** The node_id of the content attachment */ - node_id?: string; - }; /** The policy that controls the organizations in the enterprise that are allowed to run GitHub Actions. Can be one of: `all`, `none`, or `selected`. */ "enabled-organizations": "all" | "none" | "selected"; /** The permissions policy that controls the actions that are allowed to run. Can be one of: `all`, `local_only`, or `selected`. */ @@ -5781,6 +5834,8 @@ export interface components { /** The time the audit log event was recorded, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). 
*/ created_at?: number; deploy_key_fingerprint?: string; + /** A unique identifier for an audit event. */ + _document_id?: string; emoji?: string; events?: any[]; events_were?: any[]; @@ -7106,6 +7161,7 @@ export interface components { /** Whether anonymous git access is allowed. */ anonymous_access_enabled?: boolean; code_of_conduct?: components["schemas"]["code-of-conduct-simple"]; + has_advanced_security?: boolean; }; /** An artifact */ artifact: { @@ -7766,7 +7822,7 @@ export interface components { /** The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ "code-scanning-alert-dismissed-at": string | null; /** **Required when the state is dismissed.** The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. */ - "code-scanning-alert-dismissed-reason": string | null; + "code-scanning-alert-dismissed-reason": ("false positive" | "won't fix" | "used in tests") | null; "code-scanning-alert-rule-summary": { /** A unique identifier for the rule used to detect the alert. */ id?: string | null; @@ -7870,7 +7926,7 @@ export interface components { "code-scanning-analysis-commit-sha": string; /** Identifies the variable values associated with the environment in which this analysis was performed. */ "code-scanning-analysis-environment": string; - /** Identifies the configuration and environment under which the analysis was executed. */ + /** Identifies the configuration under which the analysis was executed. Used to distinguish between multiple analyses for the same tool and commit, but performed on different languages or different parts of the code. */ "code-scanning-analysis-category": string; /** The time that the analysis was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ "code-scanning-analysis-created-at": string; @@ -7913,7 +7969,7 @@ export interface components { scimType?: string | null; schemas?: string[]; }; - /** A Base64 string representing the SARIF file to upload. You must first compress your SARIF file using [`gzip`](http://www.gnu.org/software/gzip/manual/gzip.html) and then translate the contents of the file into a Base64 encoding string. For more information, see "[SARIF support for code scanning](https://docs.github.com/github/finding-security-vulnerabilities-and-errors-in-your-code/sarif-support-for-code-scanning)." */ + /** A Base64 string representing the SARIF file to upload. You must first compress your SARIF file using [`gzip`](http://www.gnu.org/software/gzip/manual/gzip.html) and then translate the contents of the file into a Base64 encoding string. For more information, see "[SARIF support for code scanning](https://docs.github.com/code-security/secure-coding/sarif-support-for-code-scanning)." 
*/ "code-scanning-analysis-sarif-file": string; "code-scanning-sarifs-receipt": { id?: components["schemas"]["code-scanning-analysis-sarif-id"]; @@ -8126,6 +8182,7 @@ export interface components { documentation: string | null; files: { code_of_conduct: components["schemas"]["code-of-conduct-simple"] | null; + code_of_conduct_file: components["schemas"]["community-health-file"] | null; license: components["schemas"]["license-simple"] | null; contributing: components["schemas"]["community-health-file"] | null; readme: components["schemas"]["community-health-file"] | null; @@ -8165,6 +8222,17 @@ export interface components { commits: components["schemas"]["commit"][]; files?: components["schemas"]["diff-entry"][]; }; + /** Content Reference attachments allow you to provide context around URLs posted in comments */ + "content-reference-attachment": { + /** The ID of the attachment */ + id: number; + /** The title of the attachment */ + title: string; + /** The body of the attachment */ + body: string; + /** The node_id of the content attachment */ + node_id?: string; + }; /** Content Tree */ "content-tree": { type: string; @@ -9352,11 +9420,12 @@ export interface components { body_text?: string; /** The URL of the release discussion. */ discussion_url?: string; + reactions?: components["schemas"]["reaction-rollup"]; }; /** Sets the state of the secret scanning alert. Can be either `open` or `resolved`. You must provide `resolution` when you set the state to `resolved`. */ "secret-scanning-alert-state": "open" | "resolved"; /** **Required when the `state` is `resolved`.** The reason for resolving the alert. Can be one of `false_positive`, `wont_fix`, `revoked`, or `used_in_tests`. */ - "secret-scanning-alert-resolution": string | null; + "secret-scanning-alert-resolution": ("false_positive" | "wont_fix" | "revoked" | "used_in_tests") | null; "secret-scanning-alert": { number?: components["schemas"]["alert-number"]; created_at?: components["schemas"]["alert-created-at"]; @@ -10068,6 +10137,23 @@ export interface components { starred_at: string; repo: components["schemas"]["repository"]; }; + /** The authorization for a Personal Access Token. */ + "personal-access-token": { + id: number; + url: string; + /** A list of scopes that this authorization is in. */ + scopes: string[] | null; + token: string; + token_last_eight: string | null; + hashed_token: string | null; + note: string | null; + note_url: string | null; + updated_at: string; + created_at: string; + fingerprint: string | null; + user?: components["schemas"]["simple-user"] | null; + expiration: string | null; + }; /** Hovercard */ hovercard: { contexts: { @@ -10165,6 +10251,12 @@ export interface components { "application/json": components["schemas"]["basic-error"]; }; }; + /** Temporary Redirect */ + temporary_redirect: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; /** Response if GitHub Advanced Security is not enabled for this repository */ code_scanning_forbidden_read: { content: { @@ -10351,7 +10443,7 @@ export interface components { event: string; /** Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. For a list of the possible `status` and `conclusion` options, see "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)." 
*/ "workflow-run-status": "completed" | "action_required" | "cancelled" | "failure" | "neutral" | "skipped" | "stale" | "success" | "timed_out" | "in_progress" | "queued" | "requested" | "waiting"; - /** The id of the workflow run */ + /** The id of the workflow run. */ "run-id": number; /** The ID of the workflow. You can also pass the workflow file name as a string. */ "workflow-id": number | string; @@ -11310,44 +11402,6 @@ export interface operations { 415: components["responses"]["preview_header_missing"]; }; }; - /** - * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. - * - * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - "apps/create-content-attachment": { - parameters: { - path: { - content_reference_id: number; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["content-reference-attachment"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The title of the attachment */ - title: string; - /** The body of the attachment */ - body: string; - }; - }; - }; - }; /** Lists all the emojis available to use on GitHub. */ "emojis/get": { parameters: {}; @@ -12373,24 +12427,16 @@ export interface operations { }; requestBody: { content: { - "application/json": ((Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }>) & { + "application/json": { /** Description of the gist */ description?: string; /** Names of files to be updated */ files?: { [key: string]: Partial<{ [key: string]: any; - }> & Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; }>; }; - }) | null; + } | null; }; }; }; @@ -13798,7 +13844,7 @@ export interface operations { content: { "application/json": { total_count: number; - repositories: components["schemas"]["repository"][]; + repositories: components["schemas"]["minimal-repository"][]; }; }; }; @@ -17332,11 +17378,6 @@ export interface operations { 403: components["responses"]["forbidden"]; }; }; - /** - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by the `pull_request` key. - * - * Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
- */ "projects/create-card": { parameters: { path: { @@ -17749,6 +17790,7 @@ export interface operations { responses: { /** Response */ 204: never; + 307: components["responses"]["temporary_redirect"]; /** If an organization owner has configured the organization to prevent members from deleting organization-owned repositories, a member will get this response: */ 403: { content: { @@ -17776,6 +17818,7 @@ export interface operations { "application/json": components["schemas"]["full-repository"]; }; }; + 307: components["responses"]["temporary_redirect"]; 403: components["responses"]["forbidden"]; 404: components["responses"]["not_found"]; 422: components["responses"]["validation_failed"]; @@ -18230,7 +18273,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18253,7 +18296,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18268,7 +18311,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18281,13 +18324,40 @@ export interface operations { }; }; }; + /** + * **Note:** This endpoint is currently in beta and is subject to change. + * + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/approve-workflow-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** The id of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; /** Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ "actions/list-workflow-run-artifacts": { parameters: { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; query: { @@ -18316,7 +18386,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. 
*/ run_id: components["parameters"]["run-id"]; }; }; @@ -18337,7 +18407,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; query: { @@ -18377,7 +18447,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18392,7 +18462,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18411,7 +18481,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18434,7 +18504,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18465,7 +18535,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -18490,7 +18560,7 @@ export interface operations { path: { owner: components["parameters"]["owner"]; repo: components["parameters"]["repo"]; - /** The id of the workflow run */ + /** The id of the workflow run. */ run_id: components["parameters"]["run-id"]; }; }; @@ -20716,8 +20786,8 @@ export interface operations { * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. * * There are two places where you can upload code scanning results. - * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/github/finding-security-vulnerabilities-and-errors-in-your-code/triaging-code-scanning-alerts-in-pull-requests)." - * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/github/finding-security-vulnerabilities-and-errors-in-your-code/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. 
For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." * * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: * @@ -21554,6 +21624,117 @@ export interface operations { }; }; }; + /** + * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. 
| + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/compare-commits-with-basehead": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** The base branch and head branch to compare. This parameter expects the format `{base}...{head}`. */ + basehead: string; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Results per page (max 100) */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comparison"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` and `repository` `full_name` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. + * + * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/create-content-attachment-for-repo": { + parameters: { + path: { + /** The owner of the repository. Determined from the `repository` `full_name` of the `content_reference` event. */ + owner: string; + /** The name of the repository. Determined from the `repository` `full_name` of the `content_reference` event. */ + repo: string; + /** The `id` of the `content_reference` event. */ + content_reference_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-reference-attachment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 415: components["responses"]["preview_header_missing"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** The title of the attachment */ + title: string; + /** The body of the attachment */ + body: string; + }; + }; + }; + }; /** * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit * `:path`, you will receive the contents of the repository's root directory. 
See the description below regarding what the API response includes for directories. @@ -24812,15 +24993,7 @@ export interface operations { }; requestBody: { content: { - "application/json": (Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }>) & { + "application/json": { /** Specify a custom domain for the repository. Sending a `null` value will remove the custom domain. For more about custom domains, see "[Using a custom domain with GitHub Pages](https://help.github.com/articles/using-a-custom-domain-with-github-pages/)." */ cname?: string | null; /** Specify whether HTTPS should be enforced for the repository. */ @@ -25685,11 +25858,7 @@ export interface operations { }; requestBody: { content: { - "application/json": (Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }>) & { + "application/json": { /** An array of user `login`s that will be requested. */ reviewers?: string[]; /** An array of team `slug`s that will be requested. */ @@ -26392,6 +26561,41 @@ export interface operations { }; }; }; + /** Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. */ + "reactions/create-for-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** release_id parameter */ + release_id: components["parameters"]["release_id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 415: components["responses"]["preview_header_missing"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the release. */ + content: "+1" | "laugh" | "heart" | "hooray" | "rocket" | "eyes"; + }; + }; + }; + }; /** * Lists all secret scanning alerts for a private repository, from newest to oldest. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. * @@ -27089,8 +27293,8 @@ export interface operations { /** * Lists all public repositories in the order that they were created. * - * Notes: - * - For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. */ "repos/list-public": { @@ -31523,7 +31727,7 @@ export interface operations { }; }; /** - * Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. 
To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. * * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. * @@ -31590,4 +31794,44 @@ export interface operations { 500: components["responses"]["internal_error"]; }; }; + /** + * **Deprecated:** use `apps.createContentAttachmentForRepo()` (`POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments`) instead. Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. + * + * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/create-content-attachment": { + parameters: { + path: { + content_reference_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-reference-attachment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 415: components["responses"]["preview_header_missing"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** The title of the attachment */ + title: string; + /** The body of the attachment */ + body: string; + }; + }; + }; + }; +} +export interface external { } diff --git a/node_modules/@octokit/openapi-types/dist-types/version.d.ts b/node_modules/@octokit/openapi-types/dist-types/version.d.ts index 0fa23172..a44c9063 100644 --- a/node_modules/@octokit/openapi-types/dist-types/version.d.ts +++ b/node_modules/@octokit/openapi-types/dist-types/version.d.ts @@ -1 +1 @@ -export declare const VERSION = "7.0.0"; +export declare const VERSION = "7.3.2"; diff --git a/node_modules/@octokit/openapi-types/dist-web/index.js b/node_modules/@octokit/openapi-types/dist-web/index.js index cf889d2c..32292756 100644 --- a/node_modules/@octokit/openapi-types/dist-web/index.js +++ b/node_modules/@octokit/openapi-types/dist-web/index.js @@ -1,4 +1,4 @@ -const VERSION = "7.0.0"; +const VERSION = "7.3.2"; export { VERSION }; //# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/openapi-types/dist-web/index.js.map b/node_modules/@octokit/openapi-types/dist-web/index.js.map index 10a5040c..c6208440 100644 --- a/node_modules/@octokit/openapi-types/dist-web/index.js.map +++ b/node_modules/@octokit/openapi-types/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js"],"sourcesContent":["export const VERSION = \"7.0.0\";\n"],"names":[],"mappings":"AAAY,MAAC,OAAO,GAAG;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js"],"sourcesContent":["export const 
VERSION = \"7.3.2\";\n"],"names":[],"mappings":"AAAY,MAAC,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/openapi-types/package.json b/node_modules/@octokit/openapi-types/package.json index 2b39555d..15ca9d4c 100644 --- a/node_modules/@octokit/openapi-types/package.json +++ b/node_modules/@octokit/openapi-types/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/openapi-types", "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec", - "version": "7.0.0", + "version": "7.3.2", "license": "MIT", "files": [ "dist-*/", @@ -13,12 +13,12 @@ "repository": "github:octokit/openapi-types.ts", "dependencies": {}, "devDependencies": { + "@gr2m/openapi-typescript": "^4.0.0-fork.2", "@octokit/tsconfig": "^1.0.2", "@pika/pack": "^0.5.0", "@pika/plugin-build-node": "^0.9.2", "@pika/plugin-build-web": "^0.9.2", "@pika/plugin-ts-standard-pkg": "^0.9.2", - "openapi-typescript": "^3.0.3", "pika-plugin-merge-properties": "^1.0.6", "semantic-release-plugin-update-version-in-files": "^1.1.0", "typescript": "^4.1.5" @@ -29,7 +29,7 @@ "source": "dist-src/index.js", "types": "dist-types/index.d.ts", "octokit": { - "openapi-version": "2.17.4" + "openapi-version": "2.22.1" }, "main": "dist-node/index.js", "module": "dist-web/index.js" diff --git a/node_modules/@octokit/plugin-paginate-rest/README.md b/node_modules/@octokit/plugin-paginate-rest/README.md index b158deec..ddd5f7d7 100644 --- a/node_modules/@octokit/plugin-paginate-rest/README.md +++ b/node_modules/@octokit/plugin-paginate-rest/README.md @@ -117,7 +117,7 @@ const issues = await octokit.paginate( Alternatively you can pass a `request` method as first argument. This is great when using in combination with [`@octokit/plugin-rest-endpoint-methods`](https://github.com/octokit/plugin-rest-endpoint-methods.js/): ```js -const issues = await octokit.paginate(octokit.issues.listForRepo, { +const issues = await octokit.paginate(octokit.rest.issues.listForRepo, { owner: "octocat", repo: "hello-world", since: "2010-10-01", @@ -156,7 +156,7 @@ const parameters = { per_page: 100, }; for await (const response of octokit.paginate.iterator( - octokit.issues.listForRepo, + octokit.rest.issues.listForRepo, parameters )) { // do whatever you want with each response, break out of the loop, etc. @@ -192,7 +192,6 @@ The plugin also exposes some types and runtime type guards for TypeScript projec + + + + ') +} + +function exitHead() { + this.lineEndingIfNeeded() + this.tag('') + this.setData('slurpOneLineEnding', true) + // Slurp the line ending from the delimiter row. +} + +function enterBody() { + // Clear slurping line ending from the delimiter row. 
+ this.setData('slurpOneLineEnding') + this.tag('') +} + +function exitBody() { + this.lineEndingIfNeeded() + this.tag('') +} + +function enterRow() { + this.setData('tableColumn', 0) + this.lineEndingIfNeeded() + this.tag('') +} + +function exitRow() { + var align = this.getData('tableAlign') + var column = this.getData('tableColumn') + + while (column < align.length) { + this.lineEndingIfNeeded() + this.tag('') + column++ + } + + this.setData('tableColumn', column) + this.lineEndingIfNeeded() + this.tag('') +} + +function enterTableHeader() { + this.lineEndingIfNeeded() + this.tag( + '' + ) +} + +function exitTableHeader() { + this.tag('') + this.setData('tableColumn', this.getData('tableColumn') + 1) +} + +function enterTableData() { + var align = alignment[this.getData('tableAlign')[this.getData('tableColumn')]] + + if (align === undefined) { + // Capture results to ignore them. + this.buffer() + } else { + this.lineEndingIfNeeded() + this.tag('') + } +} + +function exitTableData() { + var column = this.getData('tableColumn') + + if (column in this.getData('tableAlign')) { + this.tag('') + this.setData('tableColumn', column + 1) + } else { + // Stop capturing. + this.resume() + } +} + +// Overwrite the default code text data handler to unescape escaped pipes when +// they are in tables. +function exitCodeTextData(token) { + var value = this.sliceSerialize(token) + + if (this.getData('tableAlign')) { + value = value.replace(/\\([\\|])/g, replace) + } + + this.raw(this.encode(value)) +} + +function replace($0, $1) { + // Pipes work, backslashes don’t (but can’t escape pipes). + return $1 === '|' ? $1 : $0 +} diff --git a/node_modules/micromark-extension-gfm-table/index.js b/node_modules/micromark-extension-gfm-table/index.js new file mode 100644 index 00000000..b64479d1 --- /dev/null +++ b/node_modules/micromark-extension-gfm-table/index.js @@ -0,0 +1 @@ +module.exports = require('./syntax') diff --git a/node_modules/micromark-extension-gfm-table/license b/node_modules/micromark-extension-gfm-table/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/micromark-extension-gfm-table/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/micromark-extension-gfm-table/package.json b/node_modules/micromark-extension-gfm-table/package.json new file mode 100644 index 00000000..749f5a36 --- /dev/null +++ b/node_modules/micromark-extension-gfm-table/package.json @@ -0,0 +1,76 @@ +{ + "name": "micromark-extension-gfm-table", + "version": "0.4.3", + "description": "micromark extension to support GFM tables", + "license": "MIT", + "keywords": [ + "micromark", + "micromark-extension", + "table", + "row", + "column", + "cell", + "tabular", + "gfm", + "markdown", + "unified" + ], + "repository": "micromark/micromark-extension-gfm-table", + "bugs": "https://github.com/micromark/micromark-extension-gfm-table/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "index.js", + "html.js", + "syntax.js" + ], + "dependencies": { + "micromark": "~2.11.0" + }, + "devDependencies": { + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.38.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "complexity": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-gfm-table/readme.md b/node_modules/micromark-extension-gfm-table/readme.md new file mode 100644 index 00000000..e3b01264 --- /dev/null +++ b/node_modules/micromark-extension-gfm-table/readme.md @@ -0,0 +1,119 @@ +# micromark-extension-gfm-table + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support GitHub flavored markdown [tables][]. +This syntax extension matches the GFM spec and github.com. + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +You probably should use this package with +[`mdast-util-gfm-table`][mdast-util-gfm-table]. + +## Install + +[npm][]: + +```sh +npm install micromark-extension-gfm-table +``` + +## API + +### `html` + +### `syntax` + +> Note: `syntax` is the default export of this module, `html` is available at +> `micromark-extension-gfm-table/html`. + +Support [tables][]. +The exports are extensions for the micromark parser (to tokenize tables; can be +passed in `extensions`) and the default HTML compiler (to compile as `
-
Types diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js index 5cfb975a..edb6392a 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js +++ b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js @@ -2,7 +2,60 @@ Object.defineProperty(exports, '__esModule', { value: true }); -const VERSION = "2.13.3"; +const VERSION = "2.13.5"; + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} /** * Some “list” response that can be paginated have a different response structure @@ -21,6 +74,13 @@ const VERSION = "2.13.3"; * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref */ function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return _objectSpread2(_objectSpread2({}, response), {}, { + data: [] + }); + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way // to retrieve the same information. 
@@ -59,19 +119,32 @@ function iterator(octokit, route, parameters) { if (!url) return { done: true }; - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; + + try { + const response = await requestMethod({ + method, + url, + headers + }); + const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: normalizedResponse + }; + } catch (error) { + if (error.status !== 409) throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } } }) diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map index 088f86c0..e29083fe 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map +++ b/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/normalize-paginated-list-response.js","../dist-src/iterator.js","../dist-src/paginate.js","../dist-src/compose-paginate.js","../dist-src/generated/paginating-endpoints.js","../dist-src/paginating-endpoints.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"2.13.3\";\n","/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nexport function normalizePaginatedListResponse(response) {\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n","import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nexport function iterator(octokit, route, parameters) {\n const options = typeof route === \"function\"\n ? route.endpoint(parameters)\n : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return { value: normalizedResponse };\n },\n }),\n };\n}\n","import { iterator } from \"./iterator\";\nexport function paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n","import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport const composePaginateRest = Object.assign(paginate, {\n iterator,\n});\n","export const paginatingEndpoints = [\n \"GET /app/installations\",\n \"GET /applications/grants\",\n \"GET /authorizations\",\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /enterprises/{enterprise}/actions/runners\",\n \"GET /enterprises/{enterprise}/actions/runners/downloads\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runner-groups\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/runners/downloads\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/credential-authorizations\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/team-sync/groups\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /scim/v2/enterprises/{enterprise}/Groups\",\n \"GET 
/scim/v2/enterprises/{enterprise}/Users\",\n \"GET /scim/v2/organizations/{org}/Users\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/team-sync/group-mappings\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\",\n];\n","import { paginatingEndpoints, } from \"./generated/paginating-endpoints\";\nexport { paginatingEndpoints } from \"./generated/paginating-endpoints\";\nexport function isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n }\n else {\n return false;\n }\n}\n","import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport { composePaginateRest } from \"./compose-paginate\";\nexport { isPaginatingEndpoint, paginatingEndpoints, } from \"./paginating-endpoints\";\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nexport function paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit),\n }),\n };\n}\npaginateRest.VERSION = 
VERSION;\n"],"names":["VERSION","normalizePaginatedListResponse","response","responseNeedsNormalization","data","incompleteResults","incomplete_results","repositorySelection","repository_selection","totalCount","total_count","namespaceKey","Object","keys","iterator","octokit","route","parameters","options","endpoint","request","requestMethod","method","headers","url","Symbol","asyncIterator","next","done","normalizedResponse","link","match","value","paginate","mapFn","undefined","gather","results","then","result","earlyExit","concat","composePaginateRest","assign","paginatingEndpoints","isPaginatingEndpoint","arg","includes","paginateRest","bind"],"mappings":";;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACAP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,AAAO,SAASC,8BAAT,CAAwCC,QAAxC,EAAkD;AACrD,QAAMC,0BAA0B,GAAG,iBAAiBD,QAAQ,CAACE,IAA1B,IAAkC,EAAE,SAASF,QAAQ,CAACE,IAApB,CAArE;AACA,MAAI,CAACD,0BAAL,EACI,OAAOD,QAAP,CAHiD;AAKrD;;AACA,QAAMG,iBAAiB,GAAGH,QAAQ,CAACE,IAAT,CAAcE,kBAAxC;AACA,QAAMC,mBAAmB,GAAGL,QAAQ,CAACE,IAAT,CAAcI,oBAA1C;AACA,QAAMC,UAAU,GAAGP,QAAQ,CAACE,IAAT,CAAcM,WAAjC;AACA,SAAOR,QAAQ,CAACE,IAAT,CAAcE,kBAArB;AACA,SAAOJ,QAAQ,CAACE,IAAT,CAAcI,oBAArB;AACA,SAAON,QAAQ,CAACE,IAAT,CAAcM,WAArB;AACA,QAAMC,YAAY,GAAGC,MAAM,CAACC,IAAP,CAAYX,QAAQ,CAACE,IAArB,EAA2B,CAA3B,CAArB;AACA,QAAMA,IAAI,GAAGF,QAAQ,CAACE,IAAT,CAAcO,YAAd,CAAb;AACAT,EAAAA,QAAQ,CAACE,IAAT,GAAgBA,IAAhB;;AACA,MAAI,OAAOC,iBAAP,KAA6B,WAAjC,EAA8C;AAC1CH,IAAAA,QAAQ,CAACE,IAAT,CAAcE,kBAAd,GAAmCD,iBAAnC;AACH;;AACD,MAAI,OAAOE,mBAAP,KAA+B,WAAnC,EAAgD;AAC5CL,IAAAA,QAAQ,CAACE,IAAT,CAAcI,oBAAd,GAAqCD,mBAArC;AACH;;AACDL,EAAAA,QAAQ,CAACE,IAAT,CAAcM,WAAd,GAA4BD,UAA5B;AACA,SAAOP,QAAP;AACH;;ACtCM,SAASY,QAAT,CAAkBC,OAAlB,EAA2BC,KAA3B,EAAkCC,UAAlC,EAA8C;AACjD,QAAMC,OAAO,GAAG,OAAOF,KAAP,KAAiB,UAAjB,GACVA,KAAK,CAACG,QAAN,CAAeF,UAAf,CADU,GAEVF,OAAO,CAACK,OAAR,CAAgBD,QAAhB,CAAyBH,KAAzB,EAAgCC,UAAhC,CAFN;AAGA,QAAMI,aAAa,GAAG,OAAOL,KAAP,KAAiB,UAAjB,GAA8BA,KAA9B,GAAsCD,OAAO,CAACK,OAApE;AACA,QAAME,MAAM,GAAGJ,OAAO,CAACI,MAAvB;AACA,QAAMC,OAAO,GAAGL,OAAO,CAACK,OAAxB;AACA,MAAIC,GAAG,GAAGN,OAAO,CAACM,GAAlB;AACA,SAAO;AACH,KAACC,MAAM,CAACC,aAAR,GAAwB,OAAO;AAC3B,YAAMC,IAAN,GAAa;AACT,YAAI,CAACH,GAAL,EACI,OAAO;AAAEI,UAAAA,IAAI,EAAE;AAAR,SAAP;AACJ,cAAM1B,QAAQ,GAAG,MAAMmB,aAAa,CAAC;AAAEC,UAAAA,MAAF;AAAUE,UAAAA,GAAV;AAAeD,UAAAA;AAAf,SAAD,CAApC;AACA,cAAMM,kBAAkB,GAAG5B,8BAA8B,CAACC,QAAD,CAAzD,CAJS;AAMT;AACA;;AACAsB,QAAAA,GAAG,GAAG,CAAC,CAACK,kBAAkB,CAACN,OAAnB,CAA2BO,IAA3B,IAAmC,EAApC,EAAwCC,KAAxC,CAA8C,yBAA9C,KAA4E,EAA7E,EAAiF,CAAjF,CAAN;AACA,eAAO;AAAEC,UAAAA,KAAK,EAAEH;AAAT,SAAP;AACH;;AAX0B,KAAP;AADrB,GAAP;AAeH;;ACvBM,SAASI,QAAT,CAAkBlB,OAAlB,EAA2BC,KAA3B,EAAkCC,UAAlC,EAA8CiB,KAA9C,EAAqD;AACxD,MAAI,OAAOjB,UAAP,KAAsB,UAA1B,EAAsC;AAClCiB,IAAAA,KAAK,GAAGjB,UAAR;AACAA,IAAAA,UAAU,GAAGkB,SAAb;AACH;;AACD,SAAOC,MAAM,CAACrB,OAAD,EAAU,EAAV,EAAcD,QAAQ,CAACC,OAAD,EAAUC,KAAV,EAAiBC,UAAjB,CAAR,CAAqCQ,MAAM,CAACC,aAA5C,GAAd,EAA4EQ,KAA5E,CAAb;AACH;;AACD,SAASE,MAAT,CAAgBrB,OAAhB,EAAyBsB,OAAzB,EAAkCvB,QAAlC,EAA4CoB,KAA5C,EAAmD;AAC/C,SAAOpB,QAAQ,CAACa,IAAT,GAAgBW,IAAhB,CAAsBC,MAAD,IAAY;AACpC,QAAIA,MAAM,CAACX,IAAX,EAAiB;AACb,aAAOS,OAAP;AACH;;AACD,QAAIG,SAAS,GAAG,KAAhB;;AACA,aAASZ,IAAT,GAAgB;AACZY,MAAAA,SAAS,GAAG,IAAZ;AACH;;AACDH,IAAAA,OAAO,GAAGA,OAAO,CAACI,MAAR,CAAeP,KAAK,GAAGA,KAAK,CAACK,MAAM,CAACP,KAAR,EAAeJ,IAAf,CAAR,GAA+BW,MAAM,CAACP,KAAP,CAAa5B,IAAhE,CAAV;;AACA,QAAIoC,SAAJ,EAAe;AACX,aAAOH,OAAP;AACH;;AACD,WAAOD,MAAM,CAACrB,OAAD,EAAUsB,OAAV,EAAmBvB,QAAnB,EAA6BoB,KAA7B,CAAb;AACH,GAbM,CAAP;AAcH;;MCrBYQ,mBAAmB,GAAG9B,MAAM,CAAC+B,MAAP,CAAcV,QAAd,EAAwB;AACvDnB,EAAAA
;AADuD,CAAxB,CAA5B;;MCFM8B,mBAAmB,GAAG,CAC/B,wBAD+B,EAE/B,0BAF+B,EAG/B,qBAH+B,EAI/B,iEAJ+B,EAK/B,qDAL+B,EAM/B,qFAN+B,EAO/B,+EAP+B,EAQ/B,+CAR+B,EAS/B,yDAT+B,EAU/B,aAV+B,EAW/B,YAX+B,EAY/B,mBAZ+B,EAa/B,oBAb+B,EAc/B,+BAd+B,EAe/B,8BAf+B,EAgB/B,4BAhB+B,EAiB/B,gCAjB+B,EAkB/B,aAlB+B,EAmB/B,gCAnB+B,EAoB/B,mDApB+B,EAqB/B,wCArB+B,EAsB/B,2DAtB+B,EAuB/B,qCAvB+B,EAwB/B,oBAxB+B,EAyB/B,oBAzB+B,EA0B/B,kDA1B+B,EA2B/B,uCA3B+B,EA4B/B,sEA5B+B,EA6B/B,iEA7B+B,EA8B/B,iCA9B+B,EA+B/B,2CA/B+B,EAgC/B,iCAhC+B,EAiC/B,4DAjC+B,EAkC/B,wBAlC+B,EAmC/B,2CAnC+B,EAoC/B,wBApC+B,EAqC/B,oCArC+B,EAsC/B,uBAtC+B,EAuC/B,+BAvC+B,EAwC/B,6BAxC+B,EAyC/B,mDAzC+B,EA0C/B,wBA1C+B,EA2C/B,yBA3C+B,EA4C/B,4BA5C+B,EA6C/B,wDA7C+B,EA8C/B,uCA9C+B,EA+C/B,0BA/C+B,EAgD/B,gCAhD+B,EAiD/B,uBAjD+B,EAkD/B,kCAlD+B,EAmD/B,uBAnD+B,EAoD/B,+CApD+B,EAqD/B,4EArD+B,EAsD/B,uGAtD+B,EAuD/B,6EAvD+B,EAwD/B,+CAxD+B,EAyD/B,2CAzD+B,EA0D/B,4CA1D+B,EA2D/B,yCA3D+B,EA4D/B,4DA5D+B,EA6D/B,yCA7D+B,EA8D/B,yCA9D+B,EA+D/B,0CA/D+B,EAgE/B,oCAhE+B,EAiE/B,6CAjE+B,EAkE/B,2CAlE+B,EAmE/B,qDAnE+B,EAoE/B,wCApE+B,EAqE/B,2DArE+B,EAsE/B,sDAtE+B,EAuE/B,2CAvE+B,EAwE/B,6CAxE+B,EAyE/B,gEAzE+B,EA0E/B,qCA1E+B,EA2E/B,oCA3E+B,EA4E/B,iEA5E+B,EA6E/B,oEA7E+B,EA8E/B,gDA9E+B,EA+E/B,yEA/E+B,EAgF/B,kDAhF+B,EAiF/B,yCAjF+B,EAkF/B,oCAlF+B,EAmF/B,2DAnF+B,EAoF/B,mCApF+B,EAqF/B,oEArF+B,EAsF/B,yDAtF+B,EAuF/B,sDAvF+B,EAwF/B,oDAxF+B,EAyF/B,sDAzF+B,EA0F/B,kDA1F+B,EA2F/B,wCA3F+B,EA4F/B,uCA5F+B,EA6F/B,gEA7F+B,EA8F/B,kCA9F+B,EA+F/B,iCA/F+B,EAgG/B,mDAhG+B,EAiG/B,iCAjG+B,EAkG/B,uCAlG+B,EAmG/B,kCAnG+B,EAoG/B,2CApG+B,EAqG/B,kEArG+B,EAsG/B,yCAtG+B,EAuG/B,0DAvG+B,EAwG/B,wDAxG+B,EAyG/B,wDAzG+B,EA0G/B,2DA1G+B,EA2G/B,0DA3G+B,EA4G/B,gCA5G+B,EA6G/B,kCA7G+B,EA8G/B,sCA9G+B,EA+G/B,gEA/G+B,EAgH/B,yCAhH+B,EAiH/B,wCAjH+B,EAkH/B,oCAlH+B,EAmH/B,iCAnH+B,EAoH/B,0CApH+B,EAqH/B,iEArH+B,EAsH/B,wDAtH+B,EAuH/B,uDAvH+B,EAwH/B,qDAxH+B,EAyH/B,mEAzH+B,EA0H/B,uDA1H+B,EA2H/B,4EA3H+B,EA4H/B,oCA5H+B,EA6H/B,wDA7H+B,EA8H/B,kDA9H+B,EA+H/B,sCA/H+B,EAgI/B,uCAhI+B,EAiI/B,gCAjI+B,EAkI/B,iCAlI+B,EAmI/B,mBAnI+B,EAoI/B,2EApI+B,EAqI/B,8CArI+B,EAsI/B,6CAtI+B,EAuI/B,wCAvI+B,EAwI/B,kBAxI+B,EAyI/B,qBAzI+B,EA0I/B,oBA1I+B,EA2I/B,oBA3I+B,EA4I/B,0BA5I+B,EA6I/B,oBA7I+B,EA8I/B,mBA9I+B,EA+I/B,kCA/I+B,EAgJ/B,+DAhJ+B,EAiJ/B,0FAjJ+B,EAkJ/B,gEAlJ+B,EAmJ/B,kCAnJ+B,EAoJ/B,8BApJ+B,EAqJ/B,+BArJ+B,EAsJ/B,4BAtJ+B,EAuJ/B,+CAvJ+B,EAwJ/B,4BAxJ+B,EAyJ/B,kBAzJ+B,EA0J/B,kBA1J+B,EA2J/B,qBA3J+B,EA4J/B,qBA5J+B,EA6J/B,oBA7J+B,EA8J/B,yBA9J+B,EA+J/B,wDA/J+B,EAgK/B,kBAhK+B,EAiK/B,gBAjK+B,EAkK/B,iCAlK+B,EAmK/B,yCAnK+B,EAoK/B,4BApK+B,EAqK/B,sBArK+B,EAsK/B,kDAtK+B,EAuK/B,gBAvK+B,EAwK/B,yBAxK+B,EAyK/B,iBAzK+B,EA0K/B,kCA1K+B,EA2K/B,mBA3K+B,EA4K/B,yBA5K+B,EA6K/B,iBA7K+B,EA8K/B,YA9K+B,EA+K/B,8BA/K+B,EAgL/B,yCAhL+B,EAiL/B,qCAjL+B,EAkL/B,iCAlL+B,EAmL/B,iCAnL+B,EAoL/B,6BApL+B,EAqL/B,gCArL+B,EAsL/B,4BAtL+B,EAuL/B,4BAvL+B,EAwL/B,gCAxL+B,EAyL/B,uCAzL+B,EA0L/B,8CA1L+B,EA2L/B,6BA3L+B,EA4L/B,+BA5L+B,EA6L/B,qCA7L+B,CAA5B;;ACEA,SAASC,oBAAT,CAA8BC,GAA9B,EAAmC;AACtC,MAAI,OAAOA,GAAP,KAAe,QAAnB,EAA6B;AACzB,WAAOF,mBAAmB,CAACG,QAApB,CAA6BD,GAA7B,CAAP;AACH,GAFD,MAGK;AACD,WAAO,KAAP;AACH;AACJ;;ACJD;AACA;AACA;AACA;;AACA,AAAO,SAASE,YAAT,CAAsBjC,OAAtB,EAA+B;AAClC,SAAO;AACHkB,IAAAA,QAAQ,EAAErB,MAAM,CAAC+B,MAAP,CAAcV,QAAQ,CAACgB,IAAT,CAAc,IAAd,EAAoBlC,OAApB,CAAd,EAA4C;AAClDD,MAAAA,QAAQ,EAAEA,QAAQ,CAACmC,IAAT,CAAc,IAAd,EAAoBlC,OAApB;AADwC,KAA5C;AADP,GAAP;AAKH;AACDiC,YAAY,CAAChD,OAAb,GAAuBA,OAAvB;;;;;;;"} \ No newline at end of file 
+{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/normalize-paginated-list-response.js","../dist-src/iterator.js","../dist-src/paginate.js","../dist-src/compose-paginate.js","../dist-src/generated/paginating-endpoints.js","../dist-src/paginating-endpoints.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"2.13.5\";\n","/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nexport function normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return {\n ...response,\n data: [],\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n","import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nexport function iterator(octokit, route, parameters) {\n const options = typeof route === \"function\"\n ? route.endpoint(parameters)\n : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return { value: normalizedResponse };\n }\n catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: [],\n },\n };\n }\n },\n }),\n };\n}\n","import { iterator } from \"./iterator\";\nexport function paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n","import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport const composePaginateRest = Object.assign(paginate, {\n iterator,\n});\n","export const paginatingEndpoints = [\n \"GET /app/installations\",\n \"GET /applications/grants\",\n \"GET /authorizations\",\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /enterprises/{enterprise}/actions/runners\",\n \"GET /enterprises/{enterprise}/actions/runners/downloads\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runner-groups\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/runners/downloads\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/credential-authorizations\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET 
/orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/team-sync/groups\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /scim/v2/enterprises/{enterprise}/Groups\",\n \"GET /scim/v2/enterprises/{enterprise}/Users\",\n \"GET /scim/v2/organizations/{org}/Users\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/team-sync/group-mappings\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\",\n];\n","import { paginatingEndpoints, } from 
\"./generated/paginating-endpoints\";\nexport { paginatingEndpoints } from \"./generated/paginating-endpoints\";\nexport function isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n }\n else {\n return false;\n }\n}\n","import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport { composePaginateRest } from \"./compose-paginate\";\nexport { isPaginatingEndpoint, paginatingEndpoints, } from \"./paginating-endpoints\";\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nexport function paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit),\n }),\n };\n}\npaginateRest.VERSION = VERSION;\n"],"names":["VERSION","normalizePaginatedListResponse","response","data","responseNeedsNormalization","incompleteResults","incomplete_results","repositorySelection","repository_selection","totalCount","total_count","namespaceKey","Object","keys","iterator","octokit","route","parameters","options","endpoint","request","requestMethod","method","headers","url","Symbol","asyncIterator","next","done","normalizedResponse","link","match","value","error","status","paginate","mapFn","undefined","gather","results","then","result","earlyExit","concat","composePaginateRest","assign","paginatingEndpoints","isPaginatingEndpoint","arg","includes","paginateRest","bind"],"mappings":";;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,AAAO,SAASC,8BAAT,CAAwCC,QAAxC,EAAkD;AACrD;AACA,MAAI,CAACA,QAAQ,CAACC,IAAd,EAAoB;AAChB,6CACOD,QADP;AAEIC,MAAAA,IAAI,EAAE;AAFV;AAIH;;AACD,QAAMC,0BAA0B,GAAG,iBAAiBF,QAAQ,CAACC,IAA1B,IAAkC,EAAE,SAASD,QAAQ,CAACC,IAApB,CAArE;AACA,MAAI,CAACC,0BAAL,EACI,OAAOF,QAAP,CAViD;AAYrD;;AACA,QAAMG,iBAAiB,GAAGH,QAAQ,CAACC,IAAT,CAAcG,kBAAxC;AACA,QAAMC,mBAAmB,GAAGL,QAAQ,CAACC,IAAT,CAAcK,oBAA1C;AACA,QAAMC,UAAU,GAAGP,QAAQ,CAACC,IAAT,CAAcO,WAAjC;AACA,SAAOR,QAAQ,CAACC,IAAT,CAAcG,kBAArB;AACA,SAAOJ,QAAQ,CAACC,IAAT,CAAcK,oBAArB;AACA,SAAON,QAAQ,CAACC,IAAT,CAAcO,WAArB;AACA,QAAMC,YAAY,GAAGC,MAAM,CAACC,IAAP,CAAYX,QAAQ,CAACC,IAArB,EAA2B,CAA3B,CAArB;AACA,QAAMA,IAAI,GAAGD,QAAQ,CAACC,IAAT,CAAcQ,YAAd,CAAb;AACAT,EAAAA,QAAQ,CAACC,IAAT,GAAgBA,IAAhB;;AACA,MAAI,OAAOE,iBAAP,KAA6B,WAAjC,EAA8C;AAC1CH,IAAAA,QAAQ,CAACC,IAAT,CAAcG,kBAAd,GAAmCD,iBAAnC;AACH;;AACD,MAAI,OAAOE,mBAAP,KAA+B,WAAnC,EAAgD;AAC5CL,IAAAA,QAAQ,CAACC,IAAT,CAAcK,oBAAd,GAAqCD,mBAArC;AACH;;AACDL,EAAAA,QAAQ,CAACC,IAAT,CAAcO,WAAd,GAA4BD,UAA5B;AACA,SAAOP,QAAP;AACH;;AC7CM,SAASY,QAAT,CAAkBC,OAAlB,EAA2BC,KAA3B,EAAkCC,UAAlC,EAA8C;AACjD,QAAMC,OAAO,GAAG,OAAOF,KAAP,KAAiB,UAAjB,GACVA,KAAK,CAACG,QAAN,CAAeF,UAAf,CADU,GAEVF,OAAO,CAACK,OAAR,CAAgBD,QAAhB,CAAyBH,KAAzB,EAAgCC,UAAhC,CAFN;AAGA,QAAMI,aAAa,GAAG,OAAOL,KAAP,KAAiB,UAAjB,GAA8BA,KAA9B,GAAsCD,OAAO,CAACK,OAApE;AACA,QAAME,MAAM,GAAGJ,OAAO,CAACI,MAAvB;AACA,QAAMC,OAAO,GAAGL,OAAO,CAACK,OAAxB;AACA,MAAIC,GAAG,GAAGN,OAAO,CAACM,GAAlB;AACA,SAAO;AACH,KAACC,MAAM,CAACC,aAAR,GAAwB,OAAO;AAC3B,YAAMC,IAAN,GAAa;AACT,YAAI,CAACH,GAAL,EACI,OAAO;AAAEI,UAAAA,IAAI,EAAE;AAAR,SAAP;;AACJ,YAAI;AACA,gBAAM1B,QAAQ,GAAG,MAAMmB,aAAa,CAAC;AAAEC,YAAAA,MAAF;AAAUE,YAAAA,GAAV;AAAeD,YAAAA;AAAf,WAAD,CAApC;AACA,gBAAMM,kBAAkB,GAAG5B,8BAA8B,CAACC,QAAD,CAAzD,CAFA;AAIA;AACA;;AACAsB,UAAAA,GAAG,GAAG,CAAC,CAACK,kBAAkB,CAACN,OAAnB,CAA2BO,IAA3B,IAAmC,EAApC,EAAwCC,KAAxC,CAA8C,yBAA9C,KAA4E,EAA7E,EAAiF,CAAjF,CAAN;AACA,iB
AAO;AAAEC,YAAAA,KAAK,EAAEH;AAAT,WAAP;AACH,SARD,CASA,OAAOI,KAAP,EAAc;AACV,cAAIA,KAAK,CAACC,MAAN,KAAiB,GAArB,EACI,MAAMD,KAAN;AACJT,UAAAA,GAAG,GAAG,EAAN;AACA,iBAAO;AACHQ,YAAAA,KAAK,EAAE;AACHE,cAAAA,MAAM,EAAE,GADL;AAEHX,cAAAA,OAAO,EAAE,EAFN;AAGHpB,cAAAA,IAAI,EAAE;AAHH;AADJ,WAAP;AAOH;AACJ;;AAzB0B,KAAP;AADrB,GAAP;AA6BH;;ACrCM,SAASgC,QAAT,CAAkBpB,OAAlB,EAA2BC,KAA3B,EAAkCC,UAAlC,EAA8CmB,KAA9C,EAAqD;AACxD,MAAI,OAAOnB,UAAP,KAAsB,UAA1B,EAAsC;AAClCmB,IAAAA,KAAK,GAAGnB,UAAR;AACAA,IAAAA,UAAU,GAAGoB,SAAb;AACH;;AACD,SAAOC,MAAM,CAACvB,OAAD,EAAU,EAAV,EAAcD,QAAQ,CAACC,OAAD,EAAUC,KAAV,EAAiBC,UAAjB,CAAR,CAAqCQ,MAAM,CAACC,aAA5C,GAAd,EAA4EU,KAA5E,CAAb;AACH;;AACD,SAASE,MAAT,CAAgBvB,OAAhB,EAAyBwB,OAAzB,EAAkCzB,QAAlC,EAA4CsB,KAA5C,EAAmD;AAC/C,SAAOtB,QAAQ,CAACa,IAAT,GAAgBa,IAAhB,CAAsBC,MAAD,IAAY;AACpC,QAAIA,MAAM,CAACb,IAAX,EAAiB;AACb,aAAOW,OAAP;AACH;;AACD,QAAIG,SAAS,GAAG,KAAhB;;AACA,aAASd,IAAT,GAAgB;AACZc,MAAAA,SAAS,GAAG,IAAZ;AACH;;AACDH,IAAAA,OAAO,GAAGA,OAAO,CAACI,MAAR,CAAeP,KAAK,GAAGA,KAAK,CAACK,MAAM,CAACT,KAAR,EAAeJ,IAAf,CAAR,GAA+Ba,MAAM,CAACT,KAAP,CAAa7B,IAAhE,CAAV;;AACA,QAAIuC,SAAJ,EAAe;AACX,aAAOH,OAAP;AACH;;AACD,WAAOD,MAAM,CAACvB,OAAD,EAAUwB,OAAV,EAAmBzB,QAAnB,EAA6BsB,KAA7B,CAAb;AACH,GAbM,CAAP;AAcH;;MCrBYQ,mBAAmB,GAAGhC,MAAM,CAACiC,MAAP,CAAcV,QAAd,EAAwB;AACvDrB,EAAAA;AADuD,CAAxB,CAA5B;;MCFMgC,mBAAmB,GAAG,CAC/B,wBAD+B,EAE/B,0BAF+B,EAG/B,qBAH+B,EAI/B,iEAJ+B,EAK/B,qDAL+B,EAM/B,qFAN+B,EAO/B,+EAP+B,EAQ/B,+CAR+B,EAS/B,yDAT+B,EAU/B,aAV+B,EAW/B,YAX+B,EAY/B,mBAZ+B,EAa/B,oBAb+B,EAc/B,+BAd+B,EAe/B,8BAf+B,EAgB/B,4BAhB+B,EAiB/B,gCAjB+B,EAkB/B,aAlB+B,EAmB/B,gCAnB+B,EAoB/B,mDApB+B,EAqB/B,wCArB+B,EAsB/B,2DAtB+B,EAuB/B,qCAvB+B,EAwB/B,oBAxB+B,EAyB/B,oBAzB+B,EA0B/B,kDA1B+B,EA2B/B,uCA3B+B,EA4B/B,sEA5B+B,EA6B/B,iEA7B+B,EA8B/B,iCA9B+B,EA+B/B,2CA/B+B,EAgC/B,iCAhC+B,EAiC/B,4DAjC+B,EAkC/B,wBAlC+B,EAmC/B,2CAnC+B,EAoC/B,wBApC+B,EAqC/B,oCArC+B,EAsC/B,uBAtC+B,EAuC/B,+BAvC+B,EAwC/B,6BAxC+B,EAyC/B,mDAzC+B,EA0C/B,wBA1C+B,EA2C/B,yBA3C+B,EA4C/B,4BA5C+B,EA6C/B,wDA7C+B,EA8C/B,uCA9C+B,EA+C/B,0BA/C+B,EAgD/B,gCAhD+B,EAiD/B,uBAjD+B,EAkD/B,kCAlD+B,EAmD/B,uBAnD+B,EAoD/B,+CApD+B,EAqD/B,4EArD+B,EAsD/B,uGAtD+B,EAuD/B,6EAvD+B,EAwD/B,+CAxD+B,EAyD/B,2CAzD+B,EA0D/B,4CA1D+B,EA2D/B,yCA3D+B,EA4D/B,4DA5D+B,EA6D/B,yCA7D+B,EA8D/B,yCA9D+B,EA+D/B,0CA/D+B,EAgE/B,oCAhE+B,EAiE/B,6CAjE+B,EAkE/B,2CAlE+B,EAmE/B,qDAnE+B,EAoE/B,wCApE+B,EAqE/B,2DArE+B,EAsE/B,sDAtE+B,EAuE/B,2CAvE+B,EAwE/B,6CAxE+B,EAyE/B,gEAzE+B,EA0E/B,qCA1E+B,EA2E/B,oCA3E+B,EA4E/B,iEA5E+B,EA6E/B,oEA7E+B,EA8E/B,gDA9E+B,EA+E/B,yEA/E+B,EAgF/B,kDAhF+B,EAiF/B,yCAjF+B,EAkF/B,oCAlF+B,EAmF/B,2DAnF+B,EAoF/B,mCApF+B,EAqF/B,oEArF+B,EAsF/B,yDAtF+B,EAuF/B,sDAvF+B,EAwF/B,oDAxF+B,EAyF/B,sDAzF+B,EA0F/B,kDA1F+B,EA2F/B,wCA3F+B,EA4F/B,uCA5F+B,EA6F/B,gEA7F+B,EA8F/B,kCA9F+B,EA+F/B,iCA/F+B,EAgG/B,mDAhG+B,EAiG/B,iCAjG+B,EAkG/B,uCAlG+B,EAmG/B,kCAnG+B,EAoG/B,2CApG+B,EAqG/B,kEArG+B,EAsG/B,yCAtG+B,EAuG/B,0DAvG+B,EAwG/B,wDAxG+B,EAyG/B,wDAzG+B,EA0G/B,2DA1G+B,EA2G/B,0DA3G+B,EA4G/B,gCA5G+B,EA6G/B,kCA7G+B,EA8G/B,sCA9G+B,EA+G/B,gEA/G+B,EAgH/B,yCAhH+B,EAiH/B,wCAjH+B,EAkH/B,oCAlH+B,EAmH/B,iCAnH+B,EAoH/B,0CApH+B,EAqH/B,iEArH+B,EAsH/B,wDAtH+B,EAuH/B,uDAvH+B,EAwH/B,qDAxH+B,EAyH/B,mEAzH+B,EA0H/B,uDA1H+B,EA2H/B,4EA3H+B,EA4H/B,oCA5H+B,EA6H/B,wDA7H+B,EA8H/B,kDA9H+B,EA+H/B,sCA/H+B,EAgI/B,uCAhI+B,EAiI/B,gCAjI+B,EAkI/B,iCAlI+B,EAmI/B,mBAnI+B,EAoI/B,2EApI+B,EAqI/B,8CArI+B,EAsI/B,6CAtI+B,EAuI/B,wCAvI+B,EAwI/B,kBAxI+B,EAyI/B,qBAzI+B,EA0I/B,oBA1I+B,EA2I/B,oBA3I+B,EA4I/B,0BA5I+B,EA6I/B,oBA7I+B,EA8I/B,mBA9I+B,EA+I/B,kCA/I+B,EAgJ/B,+DAhJ+B,EAiJ/B,0FAjJ+B,EAkJ/B,gEAlJ+B,EAmJ/B,kCAnJ+B,EAoJ/B,8BApJ+B,EAqJ/B,+BArJ+B,EAsJ/B,4BAtJ+B,EAuJ/B,+CAvJ+B,EAwJ/B,4BAxJ+B,EAy
J/B,kBAzJ+B,EA0J/B,kBA1J+B,EA2J/B,qBA3J+B,EA4J/B,qBA5J+B,EA6J/B,oBA7J+B,EA8J/B,yBA9J+B,EA+J/B,wDA/J+B,EAgK/B,kBAhK+B,EAiK/B,gBAjK+B,EAkK/B,iCAlK+B,EAmK/B,yCAnK+B,EAoK/B,4BApK+B,EAqK/B,sBArK+B,EAsK/B,kDAtK+B,EAuK/B,gBAvK+B,EAwK/B,yBAxK+B,EAyK/B,iBAzK+B,EA0K/B,kCA1K+B,EA2K/B,mBA3K+B,EA4K/B,yBA5K+B,EA6K/B,iBA7K+B,EA8K/B,YA9K+B,EA+K/B,8BA/K+B,EAgL/B,yCAhL+B,EAiL/B,qCAjL+B,EAkL/B,iCAlL+B,EAmL/B,iCAnL+B,EAoL/B,6BApL+B,EAqL/B,gCArL+B,EAsL/B,4BAtL+B,EAuL/B,4BAvL+B,EAwL/B,gCAxL+B,EAyL/B,uCAzL+B,EA0L/B,8CA1L+B,EA2L/B,6BA3L+B,EA4L/B,+BA5L+B,EA6L/B,qCA7L+B,CAA5B;;ACEA,SAASC,oBAAT,CAA8BC,GAA9B,EAAmC;AACtC,MAAI,OAAOA,GAAP,KAAe,QAAnB,EAA6B;AACzB,WAAOF,mBAAmB,CAACG,QAApB,CAA6BD,GAA7B,CAAP;AACH,GAFD,MAGK;AACD,WAAO,KAAP;AACH;AACJ;;ACJD;AACA;AACA;AACA;;AACA,AAAO,SAASE,YAAT,CAAsBnC,OAAtB,EAA+B;AAClC,SAAO;AACHoB,IAAAA,QAAQ,EAAEvB,MAAM,CAACiC,MAAP,CAAcV,QAAQ,CAACgB,IAAT,CAAc,IAAd,EAAoBpC,OAApB,CAAd,EAA4C;AAClDD,MAAAA,QAAQ,EAAEA,QAAQ,CAACqC,IAAT,CAAc,IAAd,EAAoBpC,OAApB;AADwC,KAA5C;AADP,GAAP;AAKH;AACDmC,YAAY,CAAClD,OAAb,GAAuBA,OAAvB;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js index 14684db4..7f9ee644 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js @@ -12,13 +12,27 @@ export function iterator(octokit, route, parameters) { async next() { if (!url) return { done: true }; - const response = await requestMethod({ method, url, headers }); - const normalizedResponse = normalizePaginatedListResponse(response); - // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { value: normalizedResponse }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { value: normalizedResponse }; + } + catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [], + }, + }; + } }, }), }; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js index d29c6777..a87028b1 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js @@ -15,6 +15,13 @@ * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref */ export function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return { + ...response, + data: [], + }; + } const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); if (!responseNeedsNormalization) return response; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js b/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js 
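Editor's note: the two dist-src hunks above carry the substance of the 2.13.3 to 2.13.5 bump recorded in the version.js diff that follows. normalizePaginatedListResponse now tolerates empty (204 No Content) responses by substituting an empty data array, and the iterator wraps the request in a try/catch so that a 409 Conflict -- GitHub's answer when, for example, commits are listed on an empty repository -- yields an empty page instead of rejecting the whole pagination. A minimal consumer-side sketch of the resulting behaviour (illustrative only, not part of this patch; an octokit built with the paginateRest plugin and a GITHUB_TOKEN environment variable are assumed, and the owner/repo names are hypothetical):

    // Illustrative sketch only -- not part of the vendored diff.
    import { Octokit } from "@octokit/core";
    import { paginateRest } from "@octokit/plugin-paginate-rest";

    const MyOctokit = Octokit.plugin(paginateRest);
    const octokit = new MyOctokit({ auth: process.env.GITHUB_TOKEN });

    // With 2.13.5 the iterator converts a 409 response into
    // { status: 200, headers: {}, data: [] }, so listing commits on an empty
    // repository resolves to an empty array; with 2.13.3 the same call rejected.
    const commits = await octokit.paginate("GET /repos/{owner}/{repo}/commits", {
        owner: "octocat",      // hypothetical owner, for illustration only
        repo: "empty-repo",    // hypothetical empty repository
    });
    console.log(commits.length); // 0 for an empty repository
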
index 4c8535ad..6773f651 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js +++ b/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js @@ -1 +1 @@ -export const VERSION = "2.13.3"; +export const VERSION = "2.13.5"; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts index e690626f..23c05ec3 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts @@ -1,7 +1,7 @@ import { Endpoints } from "@octokit/types"; export interface PaginatingEndpoints { /** - * @see https://docs.github.com/rest/reference/apps/#list-installations-for-the-authenticated-app + * @see https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app */ "GET /app/installations": { parameters: Endpoints["GET /app/installations"]["parameters"]; @@ -81,21 +81,21 @@ export interface PaginatingEndpoints { response: Endpoints["GET /events"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/gists/#list-gists-for-the-authenticated-user + * @see https://docs.github.com/rest/reference/gists#list-gists-for-the-authenticated-user */ "GET /gists": { parameters: Endpoints["GET /gists"]["parameters"]; response: Endpoints["GET /gists"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/gists/#list-public-gists + * @see https://docs.github.com/rest/reference/gists#list-public-gists */ "GET /gists/public": { parameters: Endpoints["GET /gists/public"]["parameters"]; response: Endpoints["GET /gists/public"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/gists/#list-starred-gists + * @see https://docs.github.com/rest/reference/gists#list-starred-gists */ "GET /gists/starred": { parameters: Endpoints["GET /gists/starred"]["parameters"]; @@ -109,14 +109,14 @@ export interface PaginatingEndpoints { response: Endpoints["GET /gists/{gist_id}/comments"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/gists/#list-gist-commits + * @see https://docs.github.com/rest/reference/gists#list-gist-commits */ "GET /gists/{gist_id}/commits": { parameters: Endpoints["GET /gists/{gist_id}/commits"]["parameters"]; response: Endpoints["GET /gists/{gist_id}/commits"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/gists/#list-gist-forks + * @see https://docs.github.com/rest/reference/gists#list-gist-forks */ "GET /gists/{gist_id}/forks": { parameters: Endpoints["GET /gists/{gist_id}/forks"]["parameters"]; @@ -132,7 +132,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/issues/#list-issues-assigned-to-the-authenticated-user + * @see https://docs.github.com/rest/reference/issues#list-issues-assigned-to-the-authenticated-user */ "GET /issues": { parameters: Endpoints["GET /issues"]["parameters"]; @@ -181,7 +181,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /notifications"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/orgs/#list-organizations + * @see https://docs.github.com/rest/reference/orgs#list-organizations */ "GET /organizations": { parameters: Endpoints["GET /organizations"]["parameters"]; @@ -265,7 +265,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /orgs/{org}/blocks"]["response"]; }; /** - * @see 
https://docs.github.com/rest/reference/orgs/#list-saml-sso-authorizations-for-an-organization + * @see https://docs.github.com/rest/reference/orgs#list-saml-sso-authorizations-for-an-organization */ "GET /orgs/{org}/credential-authorizations": { parameters: Endpoints["GET /orgs/{org}/credential-authorizations"]["parameters"]; @@ -293,7 +293,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /orgs/{org}/hooks"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/orgs/#list-app-installations-for-an-organization + * @see https://docs.github.com/rest/reference/orgs#list-app-installations-for-an-organization */ "GET /orgs/{org}/installations": { parameters: Endpoints["GET /orgs/{org}/installations"]["parameters"]; @@ -316,7 +316,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/issues/#list-organization-issues-assigned-to-the-authenticated-user + * @see https://docs.github.com/rest/reference/issues#list-organization-issues-assigned-to-the-authenticated-user */ "GET /orgs/{org}/issues": { parameters: Endpoints["GET /orgs/{org}/issues"]["parameters"]; @@ -351,7 +351,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /orgs/{org}/outside_collaborators"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/projects/#list-organization-projects + * @see https://docs.github.com/rest/reference/projects#list-organization-projects */ "GET /orgs/{org}/projects": { parameters: Endpoints["GET /orgs/{org}/projects"]["parameters"]; @@ -365,7 +365,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /orgs/{org}/public_members"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/repos/#list-organization-repositories + * @see https://docs.github.com/rest/reference/repos#list-organization-repositories */ "GET /orgs/{org}/repos": { parameters: Endpoints["GET /orgs/{org}/repos"]["parameters"]; @@ -381,7 +381,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/teams/#list-teams + * @see https://docs.github.com/rest/reference/teams#list-teams */ "GET /orgs/{org}/teams": { parameters: Endpoints["GET /orgs/{org}/teams"]["parameters"]; @@ -402,14 +402,14 @@ export interface PaginatingEndpoints { response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-comment + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment */ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": { parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["parameters"]; response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion */ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": { parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["parameters"]; @@ -430,14 +430,14 @@ export interface 
PaginatingEndpoints { response: Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/teams/#list-team-projects + * @see https://docs.github.com/rest/reference/teams#list-team-projects */ "GET /orgs/{org}/teams/{team_slug}/projects": { parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["parameters"]; response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/teams/#list-team-repositories + * @see https://docs.github.com/rest/reference/teams#list-team-repositories */ "GET /orgs/{org}/teams/{team_slug}/repos": { parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["parameters"]; @@ -453,7 +453,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/teams/#list-child-teams + * @see https://docs.github.com/rest/reference/teams#list-child-teams */ "GET /orgs/{org}/teams/{team_slug}/teams": { parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/teams"]["parameters"]; @@ -625,7 +625,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/comments"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-commit-comment + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-commit-comment */ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions": { parameters: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["parameters"]; @@ -685,7 +685,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/repos/#list-repository-contributors + * @see https://docs.github.com/rest/reference/repos#list-repository-contributors */ "GET /repos/{owner}/{repo}/contributors": { parameters: Endpoints["GET /repos/{owner}/{repo}/contributors"]["parameters"]; @@ -741,7 +741,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/invitations"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/issues/#list-repository-issues + * @see https://docs.github.com/rest/reference/issues#list-repository-issues */ "GET /repos/{owner}/{repo}/issues": { parameters: Endpoints["GET /repos/{owner}/{repo}/issues"]["parameters"]; @@ -755,7 +755,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-an-issue-comment + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue-comment */ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": { parameters: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["parameters"]; @@ -790,7 +790,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-an-issue + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue */ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions": { parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["parameters"]; @@ -846,14 +846,14 @@ export interface PaginatingEndpoints { response: Endpoints["GET 
/repos/{owner}/{repo}/pages/builds"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/projects/#list-repository-projects + * @see https://docs.github.com/rest/reference/projects#list-repository-projects */ "GET /repos/{owner}/{repo}/projects": { parameters: Endpoints["GET /repos/{owner}/{repo}/projects"]["parameters"]; response: Endpoints["GET /repos/{owner}/{repo}/projects"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/pulls/#list-pull-requests + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests */ "GET /repos/{owner}/{repo}/pulls": { parameters: Endpoints["GET /repos/{owner}/{repo}/pulls"]["parameters"]; @@ -867,7 +867,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-pull-request-review-comment + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment */ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": { parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["parameters"]; @@ -881,14 +881,14 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/pulls/#list-commits-on-a-pull-request + * @see https://docs.github.com/rest/reference/pulls#list-commits-on-a-pull-request */ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits": { parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["parameters"]; response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/pulls/#list-pull-requests-files + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests-files */ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files": { parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["parameters"]; @@ -953,21 +953,21 @@ export interface PaginatingEndpoints { response: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/repos/#list-repository-tags + * @see https://docs.github.com/rest/reference/repos#list-repository-tags */ "GET /repos/{owner}/{repo}/tags": { parameters: Endpoints["GET /repos/{owner}/{repo}/tags"]["parameters"]; response: Endpoints["GET /repos/{owner}/{repo}/tags"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/repos/#list-repository-teams + * @see https://docs.github.com/rest/reference/repos#list-repository-teams */ "GET /repos/{owner}/{repo}/teams": { parameters: Endpoints["GET /repos/{owner}/{repo}/teams"]["parameters"]; response: Endpoints["GET /repos/{owner}/{repo}/teams"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/repos/#list-public-repositories + * @see https://docs.github.com/rest/reference/repos#list-public-repositories */ "GET /repositories": { parameters: Endpoints["GET /repositories"]["parameters"]; @@ -1001,7 +1001,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/scim/#list-scim-provisioned-identities + * @see https://docs.github.com/rest/reference/scim#list-scim-provisioned-identities */ "GET /scim/v2/organizations/{org}/Users": { parameters: Endpoints["GET /scim/v2/organizations/{org}/Users"]["parameters"]; @@ -1010,7 +1010,7 
@@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/search/#search-code + * @see https://docs.github.com/rest/reference/search#search-code */ "GET /search/code": { parameters: Endpoints["GET /search/code"]["parameters"]; @@ -1019,7 +1019,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/search/#search-commits + * @see https://docs.github.com/rest/reference/search#search-commits */ "GET /search/commits": { parameters: Endpoints["GET /search/commits"]["parameters"]; @@ -1028,7 +1028,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/search/#search-issues-and-pull-requests + * @see https://docs.github.com/rest/reference/search#search-issues-and-pull-requests */ "GET /search/issues": { parameters: Endpoints["GET /search/issues"]["parameters"]; @@ -1037,7 +1037,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/search/#search-labels + * @see https://docs.github.com/rest/reference/search#search-labels */ "GET /search/labels": { parameters: Endpoints["GET /search/labels"]["parameters"]; @@ -1046,7 +1046,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/search/#search-repositories + * @see https://docs.github.com/rest/reference/search#search-repositories */ "GET /search/repositories": { parameters: Endpoints["GET /search/repositories"]["parameters"]; @@ -1055,7 +1055,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/search/#search-topics + * @see https://docs.github.com/rest/reference/search#search-topics */ "GET /search/topics": { parameters: Endpoints["GET /search/topics"]["parameters"]; @@ -1064,7 +1064,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/search/#search-users + * @see https://docs.github.com/rest/reference/search#search-users */ "GET /search/users": { parameters: Endpoints["GET /search/users"]["parameters"]; @@ -1198,7 +1198,7 @@ export interface PaginatingEndpoints { }; }; /** - * @see https://docs.github.com/rest/reference/issues/#list-user-account-issues-assigned-to-the-authenticated-user + * @see https://docs.github.com/rest/reference/issues#list-user-account-issues-assigned-to-the-authenticated-user */ "GET /user/issues": { parameters: Endpoints["GET /user/issues"]["parameters"]; @@ -1247,7 +1247,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /user/migrations/{migration_id}/repositories"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/orgs/#list-organizations-for-the-authenticated-user + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user */ "GET /user/orgs": { parameters: Endpoints["GET /user/orgs"]["parameters"]; @@ -1261,7 +1261,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /user/public_emails"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/repos/#list-repositories-for-the-authenticated-user + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-the-authenticated-user */ "GET /user/repos": { parameters: Endpoints["GET /user/repos"]["parameters"]; @@ -1289,14 +1289,14 @@ export interface PaginatingEndpoints { response: Endpoints["GET /user/subscriptions"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/teams/#list-teams-for-the-authenticated-user + * 
@see https://docs.github.com/rest/reference/teams#list-teams-for-the-authenticated-user */ "GET /user/teams": { parameters: Endpoints["GET /user/teams"]["parameters"]; response: Endpoints["GET /user/teams"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/users/#list-users + * @see https://docs.github.com/rest/reference/users#list-users */ "GET /users": { parameters: Endpoints["GET /users"]["parameters"]; @@ -1338,7 +1338,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /users/{username}/following"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/gists/#list-gists-for-a-user + * @see https://docs.github.com/rest/reference/gists#list-gists-for-a-user */ "GET /users/{username}/gists": { parameters: Endpoints["GET /users/{username}/gists"]["parameters"]; @@ -1359,14 +1359,14 @@ export interface PaginatingEndpoints { response: Endpoints["GET /users/{username}/keys"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/orgs/#list-organizations-for-a-user + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-a-user */ "GET /users/{username}/orgs": { parameters: Endpoints["GET /users/{username}/orgs"]["parameters"]; response: Endpoints["GET /users/{username}/orgs"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/projects/#list-user-projects + * @see https://docs.github.com/rest/reference/projects#list-user-projects */ "GET /users/{username}/projects": { parameters: Endpoints["GET /users/{username}/projects"]["parameters"]; @@ -1387,7 +1387,7 @@ export interface PaginatingEndpoints { response: Endpoints["GET /users/{username}/received_events/public"]["response"]; }; /** - * @see https://docs.github.com/rest/reference/repos/#list-repositories-for-a-user + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-a-user */ "GET /users/{username}/repos": { parameters: Endpoints["GET /users/{username}/repos"]["parameters"]; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts index 276f6d96..931d5307 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts @@ -8,6 +8,13 @@ export declare function iterator(octokit: Octokit, route: Route | RequestInterfa } | { value: import("@octokit/types/dist-types/OctokitResponse").OctokitResponse; done?: undefined; + } | { + value: { + status: number; + headers: {}; + data: never[]; + }; + done?: undefined; }>; }; }; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts index 2dcc8778..0634907b 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts @@ -21,8 +21,8 @@ declare type NormalizeResponse = T & { data: GetResultsType; }; declare type DataType = "data" extends keyof T ? T["data"] : unknown; -export interface MapFunction>, R = unknown[]> { - (response: T, done: () => void): R; +export interface MapFunction>, M = unknown[]> { + (response: T, done: () => void): M; } export declare type PaginationResults = T[]; export interface PaginateInterface { @@ -32,7 +32,7 @@ export interface PaginateInterface { * @param {object} options Must set `method` and `url`. 
Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. * @param {function} mapFn Optional method to map each response to a custom array */ - (options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, R[]>): Promise>; + (options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, M[]>): Promise>; /** * Paginate a request using endpoint options * @@ -45,7 +45,7 @@ export interface PaginateInterface { * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` * @param {function} mapFn Optional method to map each response to a custom array */ - (route: R, mapFn: MapFunction): Promise; + (route: R, mapFn: MapFunction): Promise; /** * Paginate a request using a known endpoint route string and parameters, and map each response to a custom array * @@ -53,7 +53,7 @@ export interface PaginateInterface { * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. * @param {function} mapFn Optional method to map each response to a custom array */ - (route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; + (route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; /** * Paginate a request using an known endpoint route string * @@ -74,7 +74,7 @@ export interface PaginateInterface { * @param {string} request Request method (`octokit.request` or `@octokit/request`) * @param {function} mapFn? Optional method to map each response to a custom array */ - (request: R, mapFn: MapFunction>, MR>): Promise; + (request: R, mapFn: MapFunction>, M>): Promise; /** * Paginate a request using an endpoint method, parameters, and a map function * @@ -82,7 +82,7 @@ export interface PaginateInterface { * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. * @param {function} mapFn? Optional method to map each response to a custom array */ - (request: R, parameters: Parameters[0], mapFn: MapFunction>, MR>): Promise; + (request: R, parameters: Parameters[0], mapFn: MapFunction>, M>): Promise; /** * Paginate a request using an endpoint method and parameters * @@ -132,7 +132,7 @@ export interface ComposePaginateInterface { * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. * @param {function} mapFn Optional method to map each response to a custom array */ - (octokit: Octokit, options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, R[]>): Promise>; + (octokit: Octokit, options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, M[]>): Promise>; /** * Paginate a request using endpoint options * @@ -147,7 +147,7 @@ export interface ComposePaginateInterface { * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` * @param {function} mapFn Optional method to map each response to a custom array */ - (octokit: Octokit, route: R, mapFn: MapFunction): Promise; + (octokit: Octokit, route: R, mapFn: MapFunction): Promise; /** * Paginate a request using a known endpoint route string and parameters, and map each response to a custom array * @@ -156,7 +156,7 @@ export interface ComposePaginateInterface { * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. 
* @param {function} mapFn Optional method to map each response to a custom array */ - (octokit: Octokit, route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; + (octokit: Octokit, route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; /** * Paginate a request using an known endpoint route string * @@ -180,7 +180,7 @@ export interface ComposePaginateInterface { * @param {string} request Request method (`octokit.request` or `@octokit/request`) * @param {function} mapFn? Optional method to map each response to a custom array */ - (octokit: Octokit, request: R, mapFn: MapFunction>, MR>): Promise; + (octokit: Octokit, request: R, mapFn: MapFunction>, M>): Promise; /** * Paginate a request using an endpoint method, parameters, and a map function * @@ -189,7 +189,7 @@ export interface ComposePaginateInterface { * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. * @param {function} mapFn? Optional method to map each response to a custom array */ - (octokit: Octokit, request: R, parameters: Parameters[0], mapFn: MapFunction>, MR>): Promise; + (octokit: Octokit, request: R, parameters: Parameters[0], mapFn: MapFunction>, M>): Promise; /** * Paginate a request using an endpoint method and parameters * diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts b/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts index 0a015050..eaa7ef0b 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts +++ b/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts @@ -1 +1 @@ -export declare const VERSION = "2.13.3"; +export declare const VERSION = "2.13.5"; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js index b81c5aa5..1825d524 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js +++ b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js @@ -1,4 +1,4 @@ -const VERSION = "2.13.3"; +const VERSION = "2.13.5"; /** * Some “list” response that can be paginated have a different response structure @@ -17,6 +17,13 @@ const VERSION = "2.13.3"; * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref */ function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return { + ...response, + data: [], + }; + } const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); if (!responseNeedsNormalization) return response; @@ -54,13 +61,27 @@ function iterator(octokit, route, parameters) { async next() { if (!url) return { done: true }; - const response = await requestMethod({ method, url, headers }); - const normalizedResponse = normalizePaginatedListResponse(response); - // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { value: normalizedResponse }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or 
`link` header is not set + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { value: normalizedResponse }; + } + catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [], + }, + }; + } }, }), }; diff --git a/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map index 3d0a4977..7ba2bf1b 100644 --- a/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map +++ b/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/normalize-paginated-list-response.js","../dist-src/iterator.js","../dist-src/paginate.js","../dist-src/compose-paginate.js","../dist-src/generated/paginating-endpoints.js","../dist-src/paginating-endpoints.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"2.13.3\";\n","/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nexport function normalizePaginatedListResponse(response) {\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n","import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nexport function iterator(octokit, route, parameters) {\n const options = typeof route === \"function\"\n ? route.endpoint(parameters)\n : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return { value: normalizedResponse };\n },\n }),\n };\n}\n","import { iterator } from \"./iterator\";\nexport function paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n","import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport const composePaginateRest = Object.assign(paginate, {\n iterator,\n});\n","export const paginatingEndpoints = [\n \"GET /app/installations\",\n \"GET /applications/grants\",\n \"GET /authorizations\",\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /enterprises/{enterprise}/actions/runners\",\n \"GET /enterprises/{enterprise}/actions/runners/downloads\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runner-groups\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/runners/downloads\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/credential-authorizations\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/migrations\",\n \"GET 
/orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/team-sync/groups\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET 
/repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /scim/v2/enterprises/{enterprise}/Groups\",\n \"GET /scim/v2/enterprises/{enterprise}/Users\",\n \"GET /scim/v2/organizations/{org}/Users\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/team-sync/group-mappings\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\",\n];\n","import { paginatingEndpoints, } from \"./generated/paginating-endpoints\";\nexport { paginatingEndpoints } from \"./generated/paginating-endpoints\";\nexport function isPaginatingEndpoint(arg) 
{\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n }\n else {\n return false;\n }\n}\n","import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport { composePaginateRest } from \"./compose-paginate\";\nexport { isPaginatingEndpoint, paginatingEndpoints, } from \"./paginating-endpoints\";\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nexport function paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit),\n }),\n };\n}\npaginateRest.VERSION = VERSION;\n"],"names":[],"mappings":"AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA1C;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,AAAO,SAAS,8BAA8B,CAAC,QAAQ,EAAE;AACzD,IAAI,MAAM,0BAA0B,GAAG,aAAa,IAAI,QAAQ,CAAC,IAAI,IAAI,EAAE,KAAK,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;AACnG,IAAI,IAAI,CAAC,0BAA0B;AACnC,QAAQ,OAAO,QAAQ,CAAC;AACxB;AACA;AACA,IAAI,MAAM,iBAAiB,GAAG,QAAQ,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAC/D,IAAI,MAAM,mBAAmB,GAAG,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC;AACnE,IAAI,MAAM,UAAU,GAAG,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC;AACjD,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAC5C,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC;AAC9C,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC;AACrC,IAAI,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,IAAI,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AAC7C,IAAI,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;AACzB,IAAI,IAAI,OAAO,iBAAiB,KAAK,WAAW,EAAE;AAClD,QAAQ,QAAQ,CAAC,IAAI,CAAC,kBAAkB,GAAG,iBAAiB,CAAC;AAC7D,KAAK;AACL,IAAI,IAAI,OAAO,mBAAmB,KAAK,WAAW,EAAE;AACpD,QAAQ,QAAQ,CAAC,IAAI,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;AACjE,KAAK;AACL,IAAI,QAAQ,CAAC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;AAC3C,IAAI,OAAO,QAAQ,CAAC;AACpB,CAAC;;ACtCM,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE;AACrD,IAAI,MAAM,OAAO,GAAG,OAAO,KAAK,KAAK,UAAU;AAC/C,UAAU,KAAK,CAAC,QAAQ,CAAC,UAAU,CAAC;AACpC,UAAU,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;AACtD,IAAI,MAAM,aAAa,GAAG,OAAO,KAAK,KAAK,UAAU,GAAG,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC;AAChF,IAAI,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;AAClC,IAAI,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACpC,IAAI,IAAI,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;AAC1B,IAAI,OAAO;AACX,QAAQ,CAAC,MAAM,CAAC,aAAa,GAAG,OAAO;AACvC,YAAY,MAAM,IAAI,GAAG;AACzB,gBAAgB,IAAI,CAAC,GAAG;AACxB,oBAAoB,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;AAC1C,gBAAgB,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC,CAAC;AAC/E,gBAAgB,MAAM,kBAAkB,GAAG,8BAA8B,CAAC,QAAQ,CAAC,CAAC;AACpF;AACA;AACA;AACA,gBAAgB,GAAG,GAAG,CAAC,CAAC,kBAAkB,CAAC,OAAO,CAAC,IAAI,IAAI,EAAE,EAAE,KAAK,CAAC,yBAAyB,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC;AAC1G,gBAAgB,OAAO,EAAE,KAAK,EAAE,kBAAkB,EAAE,CAAC;AACrD,aAAa;AACb,SAAS,CAAC;AACV,KAAK,CAAC;AACN,CAAC;;ACvBM,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,KAAK,EAAE;AAC5D,IAAI,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;AAC1C,QAAQ,KAAK,GAAG,UAAU,CAAC;AAC3B,QAAQ,UAAU,GAAG,SAAS,CAAC;AAC/B,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,OAAO,EAAE,EAAE,EAAE,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;AACpG,CAAC;AACD,SAAS,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE;AACnD,IAAI,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK;AAC5C,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE;AACzB,YAAY,OAAO,OAAO,CAAC;AAC3B,SAAS;AACT,QAAQ,IAAI,SAAS,GAAG,KAAK,CAAC;AAC9B,QAAQ,SAAS,IAAI,GAAG;AACxB,YAAY,SAAS,GAAG,IAAI,CAAC;AAC7B,SAAS;AACT,QAAQ,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,C
AAC,CAAC;AACxF,QAAQ,IAAI,SAAS,EAAE;AACvB,YAAY,OAAO,OAAO,CAAC;AAC3B,SAAS;AACT,QAAQ,OAAO,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;AACzD,KAAK,CAAC,CAAC;AACP,CAAC;;ACrBW,MAAC,mBAAmB,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;AAC3D,IAAI,QAAQ;AACZ,CAAC,CAAC;;ACJU,MAAC,mBAAmB,GAAG;AACnC,IAAI,wBAAwB;AAC5B,IAAI,0BAA0B;AAC9B,IAAI,qBAAqB;AACzB,IAAI,iEAAiE;AACrE,IAAI,qDAAqD;AACzD,IAAI,qFAAqF;AACzF,IAAI,+EAA+E;AACnF,IAAI,+CAA+C;AACnD,IAAI,yDAAyD;AAC7D,IAAI,aAAa;AACjB,IAAI,YAAY;AAChB,IAAI,mBAAmB;AACvB,IAAI,oBAAoB;AACxB,IAAI,+BAA+B;AACnC,IAAI,8BAA8B;AAClC,IAAI,4BAA4B;AAChC,IAAI,gCAAgC;AACpC,IAAI,aAAa;AACjB,IAAI,gCAAgC;AACpC,IAAI,mDAAmD;AACvD,IAAI,wCAAwC;AAC5C,IAAI,2DAA2D;AAC/D,IAAI,qCAAqC;AACzC,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,kDAAkD;AACtD,IAAI,uCAAuC;AAC3C,IAAI,sEAAsE;AAC1E,IAAI,iEAAiE;AACrE,IAAI,iCAAiC;AACrC,IAAI,2CAA2C;AAC/C,IAAI,iCAAiC;AACrC,IAAI,4DAA4D;AAChE,IAAI,wBAAwB;AAC5B,IAAI,2CAA2C;AAC/C,IAAI,wBAAwB;AAC5B,IAAI,oCAAoC;AACxC,IAAI,uBAAuB;AAC3B,IAAI,+BAA+B;AACnC,IAAI,6BAA6B;AACjC,IAAI,mDAAmD;AACvD,IAAI,wBAAwB;AAC5B,IAAI,yBAAyB;AAC7B,IAAI,4BAA4B;AAChC,IAAI,wDAAwD;AAC5D,IAAI,uCAAuC;AAC3C,IAAI,0BAA0B;AAC9B,IAAI,gCAAgC;AACpC,IAAI,uBAAuB;AAC3B,IAAI,kCAAkC;AACtC,IAAI,uBAAuB;AAC3B,IAAI,+CAA+C;AACnD,IAAI,4EAA4E;AAChF,IAAI,uGAAuG;AAC3G,IAAI,6EAA6E;AACjF,IAAI,+CAA+C;AACnD,IAAI,2CAA2C;AAC/C,IAAI,4CAA4C;AAChD,IAAI,yCAAyC;AAC7C,IAAI,4DAA4D;AAChE,IAAI,yCAAyC;AAC7C,IAAI,yCAAyC;AAC7C,IAAI,0CAA0C;AAC9C,IAAI,oCAAoC;AACxC,IAAI,6CAA6C;AACjD,IAAI,2CAA2C;AAC/C,IAAI,qDAAqD;AACzD,IAAI,wCAAwC;AAC5C,IAAI,2DAA2D;AAC/D,IAAI,sDAAsD;AAC1D,IAAI,2CAA2C;AAC/C,IAAI,6CAA6C;AACjD,IAAI,gEAAgE;AACpE,IAAI,qCAAqC;AACzC,IAAI,oCAAoC;AACxC,IAAI,iEAAiE;AACrE,IAAI,oEAAoE;AACxE,IAAI,gDAAgD;AACpD,IAAI,yEAAyE;AAC7E,IAAI,kDAAkD;AACtD,IAAI,yCAAyC;AAC7C,IAAI,oCAAoC;AACxC,IAAI,2DAA2D;AAC/D,IAAI,mCAAmC;AACvC,IAAI,oEAAoE;AACxE,IAAI,yDAAyD;AAC7D,IAAI,sDAAsD;AAC1D,IAAI,oDAAoD;AACxD,IAAI,sDAAsD;AAC1D,IAAI,kDAAkD;AACtD,IAAI,wCAAwC;AAC5C,IAAI,uCAAuC;AAC3C,IAAI,gEAAgE;AACpE,IAAI,kCAAkC;AACtC,IAAI,iCAAiC;AACrC,IAAI,mDAAmD;AACvD,IAAI,iCAAiC;AACrC,IAAI,uCAAuC;AAC3C,IAAI,kCAAkC;AACtC,IAAI,2CAA2C;AAC/C,IAAI,kEAAkE;AACtE,IAAI,yCAAyC;AAC7C,IAAI,0DAA0D;AAC9D,IAAI,wDAAwD;AAC5D,IAAI,wDAAwD;AAC5D,IAAI,2DAA2D;AAC/D,IAAI,0DAA0D;AAC9D,IAAI,gCAAgC;AACpC,IAAI,kCAAkC;AACtC,IAAI,sCAAsC;AAC1C,IAAI,gEAAgE;AACpE,IAAI,yCAAyC;AAC7C,IAAI,wCAAwC;AAC5C,IAAI,oCAAoC;AACxC,IAAI,iCAAiC;AACrC,IAAI,0CAA0C;AAC9C,IAAI,iEAAiE;AACrE,IAAI,wDAAwD;AAC5D,IAAI,uDAAuD;AAC3D,IAAI,qDAAqD;AACzD,IAAI,mEAAmE;AACvE,IAAI,uDAAuD;AAC3D,IAAI,4EAA4E;AAChF,IAAI,oCAAoC;AACxC,IAAI,wDAAwD;AAC5D,IAAI,kDAAkD;AACtD,IAAI,sCAAsC;AAC1C,IAAI,uCAAuC;AAC3C,IAAI,gCAAgC;AACpC,IAAI,iCAAiC;AACrC,IAAI,mBAAmB;AACvB,IAAI,2EAA2E;AAC/E,IAAI,8CAA8C;AAClD,IAAI,6CAA6C;AACjD,IAAI,wCAAwC;AAC5C,IAAI,kBAAkB;AACtB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,0BAA0B;AAC9B,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,kCAAkC;AACtC,IAAI,+DAA+D;AACnE,IAAI,0FAA0F;AAC9F,IAAI,gEAAgE;AACpE,IAAI,kCAAkC;AACtC,IAAI,8BAA8B;AAClC,IAAI,+BAA+B;AACnC,IAAI,4BAA4B;AAChC,IAAI,+CAA+C;AACnD,IAAI,4BAA4B;AAChC,IAAI,kBAAkB;AACtB,IAAI,kBAAkB;AACtB,IAAI,qBAAqB;AACzB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,yBAAyB;AAC7B,IAAI,wDAAwD;AAC5D,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AACpB,IAAI,iCAAiC;AACrC,IAAI,yCAAyC;AAC7C,IAAI,4BAA4B;AAChC,IAAI,sBAAsB;AAC1B,IAAI,kDAAkD;AACtD,IAAI,gBAAgB;AACpB,IAAI,yBAAyB;AAC7B,IAAI,iBAAiB;AACrB,IAAI,kCAAkC;AACtC,IAAI,mBAAmB;AACvB,IAAI,yBAAyB;AAC7B,IAAI,iBAAiB;AACrB,IAAI,YAAY;AAChB,IAAI,8BAA8B;AAClC,IAAI,yCAAyC;AAC7C,IAAI,qCAAqC;AACzC,IAAI,iCAAiC;AACrC,IAAI,iCAAiC;AACrC,IAAI,6BAA6B;AACjC,IAAI,gCAAgC;AACpC,IAAI,4BAA4B;AAChC,
IAAI,4BAA4B;AAChC,IAAI,gCAAgC;AACpC,IAAI,uCAAuC;AAC3C,IAAI,8CAA8C;AAClD,IAAI,6BAA6B;AACjC,IAAI,+BAA+B;AACnC,IAAI,qCAAqC;AACzC,CAAC;;AC5LM,SAAS,oBAAoB,CAAC,GAAG,EAAE;AAC1C,IAAI,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;AACjC,QAAQ,OAAO,mBAAmB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACjD,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,KAAK,CAAC;AACrB,KAAK;AACL,CAAC;;ACJD;AACA;AACA;AACA;AACA,AAAO,SAAS,YAAY,CAAC,OAAO,EAAE;AACtC,IAAI,OAAO;AACX,QAAQ,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE;AAC9D,YAAY,QAAQ,EAAE,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC;AAClD,SAAS,CAAC;AACV,KAAK,CAAC;AACN,CAAC;AACD,YAAY,CAAC,OAAO,GAAG,OAAO,CAAC;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/normalize-paginated-list-response.js","../dist-src/iterator.js","../dist-src/paginate.js","../dist-src/compose-paginate.js","../dist-src/generated/paginating-endpoints.js","../dist-src/paginating-endpoints.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"2.13.5\";\n","/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nexport function normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return {\n ...response,\n data: [],\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n","import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nexport function iterator(octokit, route, parameters) {\n const options = typeof route === \"function\"\n ? route.endpoint(parameters)\n : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return { value: normalizedResponse };\n }\n catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: [],\n },\n };\n }\n },\n }),\n };\n}\n","import { iterator } from \"./iterator\";\nexport function paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n","import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport const composePaginateRest = Object.assign(paginate, {\n iterator,\n});\n","export const paginatingEndpoints = [\n \"GET /app/installations\",\n \"GET /applications/grants\",\n \"GET /authorizations\",\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\",\n \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /enterprises/{enterprise}/actions/runners\",\n \"GET /enterprises/{enterprise}/actions/runners/downloads\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/runner-groups\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\",\n \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/runners/downloads\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/credential-authorizations\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET 
/orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/team-sync/groups\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /scim/v2/enterprises/{enterprise}/Groups\",\n \"GET /scim/v2/enterprises/{enterprise}/Users\",\n \"GET /scim/v2/organizations/{org}/Users\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/team-sync/group-mappings\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\",\n];\n","import { paginatingEndpoints, } from 
\"./generated/paginating-endpoints\";\nexport { paginatingEndpoints } from \"./generated/paginating-endpoints\";\nexport function isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n }\n else {\n return false;\n }\n}\n","import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nexport { composePaginateRest } from \"./compose-paginate\";\nexport { isPaginatingEndpoint, paginatingEndpoints, } from \"./paginating-endpoints\";\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nexport function paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit),\n }),\n };\n}\npaginateRest.VERSION = VERSION;\n"],"names":[],"mappings":"AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA1C;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,AAAO,SAAS,8BAA8B,CAAC,QAAQ,EAAE;AACzD;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE;AACxB,QAAQ,OAAO;AACf,YAAY,GAAG,QAAQ;AACvB,YAAY,IAAI,EAAE,EAAE;AACpB,SAAS,CAAC;AACV,KAAK;AACL,IAAI,MAAM,0BAA0B,GAAG,aAAa,IAAI,QAAQ,CAAC,IAAI,IAAI,EAAE,KAAK,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;AACnG,IAAI,IAAI,CAAC,0BAA0B;AACnC,QAAQ,OAAO,QAAQ,CAAC;AACxB;AACA;AACA,IAAI,MAAM,iBAAiB,GAAG,QAAQ,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAC/D,IAAI,MAAM,mBAAmB,GAAG,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC;AACnE,IAAI,MAAM,UAAU,GAAG,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC;AACjD,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAC5C,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC;AAC9C,IAAI,OAAO,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC;AACrC,IAAI,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,IAAI,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AAC7C,IAAI,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;AACzB,IAAI,IAAI,OAAO,iBAAiB,KAAK,WAAW,EAAE;AAClD,QAAQ,QAAQ,CAAC,IAAI,CAAC,kBAAkB,GAAG,iBAAiB,CAAC;AAC7D,KAAK;AACL,IAAI,IAAI,OAAO,mBAAmB,KAAK,WAAW,EAAE;AACpD,QAAQ,QAAQ,CAAC,IAAI,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;AACjE,KAAK;AACL,IAAI,QAAQ,CAAC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;AAC3C,IAAI,OAAO,QAAQ,CAAC;AACpB,CAAC;;AC7CM,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE;AACrD,IAAI,MAAM,OAAO,GAAG,OAAO,KAAK,KAAK,UAAU;AAC/C,UAAU,KAAK,CAAC,QAAQ,CAAC,UAAU,CAAC;AACpC,UAAU,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;AACtD,IAAI,MAAM,aAAa,GAAG,OAAO,KAAK,KAAK,UAAU,GAAG,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC;AAChF,IAAI,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;AAClC,IAAI,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACpC,IAAI,IAAI,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;AAC1B,IAAI,OAAO;AACX,QAAQ,CAAC,MAAM,CAAC,aAAa,GAAG,OAAO;AACvC,YAAY,MAAM,IAAI,GAAG;AACzB,gBAAgB,IAAI,CAAC,GAAG;AACxB,oBAAoB,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;AAC1C,gBAAgB,IAAI;AACpB,oBAAoB,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,CAAC,CAAC;AACnF,oBAAoB,MAAM,kBAAkB,GAAG,8BAA8B,CAAC,QAAQ,CAAC,CAAC;AACxF;AACA;AACA;AACA,oBAAoB,GAAG,GAAG,CAAC,CAAC,kBAAkB,CAAC,OAAO,CAAC,IAAI,IAAI,EAAE,EAAE,KAAK,CAAC,yBAAyB,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC;AAC9G,oBAAoB,OAAO,EAAE,KAAK,EAAE,kBAAkB,EAAE,CAAC;AACzD,iBAAiB;AACjB,gBAAgB,OAAO,KAAK,EAAE;AAC9B,oBAAoB,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG;AAC5C,wBAAwB,MAAM,KAAK,CAAC;AACpC,oBAAoB,GAAG,GAAG,EAAE,CAAC;AAC7B,oBAAoB,OAAO;AAC3B,wBAAwB,KAAK,EAAE;AAC/B,4BAA4B,MAAM,EAAE,GAAG;AACvC,4BAA4B,OAAO,EAAE,EAAE;AACvC,4BAA4B,IAAI,EAAE,EAAE;AACpC,yBAAyB;AACzB,qBAAqB,CAAC;AACtB,iBAAiB;AACjB,aAAa;AACb,SAAS,CAAC;AACV,KAAK,CAAC;AACN,CAAC;;ACrCM,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,KAAK,EAAE;AAC5D,IAAI,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;AAC1C,QAAQ,KAAK,GAAG,
UAAU,CAAC;AAC3B,QAAQ,UAAU,GAAG,SAAS,CAAC;AAC/B,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,OAAO,EAAE,EAAE,EAAE,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;AACpG,CAAC;AACD,SAAS,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE;AACnD,IAAI,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK;AAC5C,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE;AACzB,YAAY,OAAO,OAAO,CAAC;AAC3B,SAAS;AACT,QAAQ,IAAI,SAAS,GAAG,KAAK,CAAC;AAC9B,QAAQ,SAAS,IAAI,GAAG;AACxB,YAAY,SAAS,GAAG,IAAI,CAAC;AAC7B,SAAS;AACT,QAAQ,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACxF,QAAQ,IAAI,SAAS,EAAE;AACvB,YAAY,OAAO,OAAO,CAAC;AAC3B,SAAS;AACT,QAAQ,OAAO,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;AACzD,KAAK,CAAC,CAAC;AACP,CAAC;;ACrBW,MAAC,mBAAmB,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;AAC3D,IAAI,QAAQ;AACZ,CAAC,CAAC;;ACJU,MAAC,mBAAmB,GAAG;AACnC,IAAI,wBAAwB;AAC5B,IAAI,0BAA0B;AAC9B,IAAI,qBAAqB;AACzB,IAAI,iEAAiE;AACrE,IAAI,qDAAqD;AACzD,IAAI,qFAAqF;AACzF,IAAI,+EAA+E;AACnF,IAAI,+CAA+C;AACnD,IAAI,yDAAyD;AAC7D,IAAI,aAAa;AACjB,IAAI,YAAY;AAChB,IAAI,mBAAmB;AACvB,IAAI,oBAAoB;AACxB,IAAI,+BAA+B;AACnC,IAAI,8BAA8B;AAClC,IAAI,4BAA4B;AAChC,IAAI,gCAAgC;AACpC,IAAI,aAAa;AACjB,IAAI,gCAAgC;AACpC,IAAI,mDAAmD;AACvD,IAAI,wCAAwC;AAC5C,IAAI,2DAA2D;AAC/D,IAAI,qCAAqC;AACzC,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,kDAAkD;AACtD,IAAI,uCAAuC;AAC3C,IAAI,sEAAsE;AAC1E,IAAI,iEAAiE;AACrE,IAAI,iCAAiC;AACrC,IAAI,2CAA2C;AAC/C,IAAI,iCAAiC;AACrC,IAAI,4DAA4D;AAChE,IAAI,wBAAwB;AAC5B,IAAI,2CAA2C;AAC/C,IAAI,wBAAwB;AAC5B,IAAI,oCAAoC;AACxC,IAAI,uBAAuB;AAC3B,IAAI,+BAA+B;AACnC,IAAI,6BAA6B;AACjC,IAAI,mDAAmD;AACvD,IAAI,wBAAwB;AAC5B,IAAI,yBAAyB;AAC7B,IAAI,4BAA4B;AAChC,IAAI,wDAAwD;AAC5D,IAAI,uCAAuC;AAC3C,IAAI,0BAA0B;AAC9B,IAAI,gCAAgC;AACpC,IAAI,uBAAuB;AAC3B,IAAI,kCAAkC;AACtC,IAAI,uBAAuB;AAC3B,IAAI,+CAA+C;AACnD,IAAI,4EAA4E;AAChF,IAAI,uGAAuG;AAC3G,IAAI,6EAA6E;AACjF,IAAI,+CAA+C;AACnD,IAAI,2CAA2C;AAC/C,IAAI,4CAA4C;AAChD,IAAI,yCAAyC;AAC7C,IAAI,4DAA4D;AAChE,IAAI,yCAAyC;AAC7C,IAAI,yCAAyC;AAC7C,IAAI,0CAA0C;AAC9C,IAAI,oCAAoC;AACxC,IAAI,6CAA6C;AACjD,IAAI,2CAA2C;AAC/C,IAAI,qDAAqD;AACzD,IAAI,wCAAwC;AAC5C,IAAI,2DAA2D;AAC/D,IAAI,sDAAsD;AAC1D,IAAI,2CAA2C;AAC/C,IAAI,6CAA6C;AACjD,IAAI,gEAAgE;AACpE,IAAI,qCAAqC;AACzC,IAAI,oCAAoC;AACxC,IAAI,iEAAiE;AACrE,IAAI,oEAAoE;AACxE,IAAI,gDAAgD;AACpD,IAAI,yEAAyE;AAC7E,IAAI,kDAAkD;AACtD,IAAI,yCAAyC;AAC7C,IAAI,oCAAoC;AACxC,IAAI,2DAA2D;AAC/D,IAAI,mCAAmC;AACvC,IAAI,oEAAoE;AACxE,IAAI,yDAAyD;AAC7D,IAAI,sDAAsD;AAC1D,IAAI,oDAAoD;AACxD,IAAI,sDAAsD;AAC1D,IAAI,kDAAkD;AACtD,IAAI,wCAAwC;AAC5C,IAAI,uCAAuC;AAC3C,IAAI,gEAAgE;AACpE,IAAI,kCAAkC;AACtC,IAAI,iCAAiC;AACrC,IAAI,mDAAmD;AACvD,IAAI,iCAAiC;AACrC,IAAI,uCAAuC;AAC3C,IAAI,kCAAkC;AACtC,IAAI,2CAA2C;AAC/C,IAAI,kEAAkE;AACtE,IAAI,yCAAyC;AAC7C,IAAI,0DAA0D;AAC9D,IAAI,wDAAwD;AAC5D,IAAI,wDAAwD;AAC5D,IAAI,2DAA2D;AAC/D,IAAI,0DAA0D;AAC9D,IAAI,gCAAgC;AACpC,IAAI,kCAAkC;AACtC,IAAI,sCAAsC;AAC1C,IAAI,gEAAgE;AACpE,IAAI,yCAAyC;AAC7C,IAAI,wCAAwC;AAC5C,IAAI,oCAAoC;AACxC,IAAI,iCAAiC;AACrC,IAAI,0CAA0C;AAC9C,IAAI,iEAAiE;AACrE,IAAI,wDAAwD;AAC5D,IAAI,uDAAuD;AAC3D,IAAI,qDAAqD;AACzD,IAAI,mEAAmE;AACvE,IAAI,uDAAuD;AAC3D,IAAI,4EAA4E;AAChF,IAAI,oCAAoC;AACxC,IAAI,wDAAwD;AAC5D,IAAI,kDAAkD;AACtD,IAAI,sCAAsC;AAC1C,IAAI,uCAAuC;AAC3C,IAAI,gCAAgC;AACpC,IAAI,iCAAiC;AACrC,IAAI,mBAAmB;AACvB,IAAI,2EAA2E;AAC/E,IAAI,8CAA8C;AAClD,IAAI,6CAA6C;AACjD,IAAI,wCAAwC;AAC5C,IAAI,kBAAkB;AACtB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,0BAA0B;AAC9B,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,kCAAkC;AACtC,IAAI,+DAA+D;AACnE,IAAI,0FAA0F;AAC9F,IAAI,gEAAgE;AACpE,IAAI,kCAAkC;AACtC,IAAI,8B
AA8B;AAClC,IAAI,+BAA+B;AACnC,IAAI,4BAA4B;AAChC,IAAI,+CAA+C;AACnD,IAAI,4BAA4B;AAChC,IAAI,kBAAkB;AACtB,IAAI,kBAAkB;AACtB,IAAI,qBAAqB;AACzB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,yBAAyB;AAC7B,IAAI,wDAAwD;AAC5D,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AACpB,IAAI,iCAAiC;AACrC,IAAI,yCAAyC;AAC7C,IAAI,4BAA4B;AAChC,IAAI,sBAAsB;AAC1B,IAAI,kDAAkD;AACtD,IAAI,gBAAgB;AACpB,IAAI,yBAAyB;AAC7B,IAAI,iBAAiB;AACrB,IAAI,kCAAkC;AACtC,IAAI,mBAAmB;AACvB,IAAI,yBAAyB;AAC7B,IAAI,iBAAiB;AACrB,IAAI,YAAY;AAChB,IAAI,8BAA8B;AAClC,IAAI,yCAAyC;AAC7C,IAAI,qCAAqC;AACzC,IAAI,iCAAiC;AACrC,IAAI,iCAAiC;AACrC,IAAI,6BAA6B;AACjC,IAAI,gCAAgC;AACpC,IAAI,4BAA4B;AAChC,IAAI,4BAA4B;AAChC,IAAI,gCAAgC;AACpC,IAAI,uCAAuC;AAC3C,IAAI,8CAA8C;AAClD,IAAI,6BAA6B;AACjC,IAAI,+BAA+B;AACnC,IAAI,qCAAqC;AACzC,CAAC;;AC5LM,SAAS,oBAAoB,CAAC,GAAG,EAAE;AAC1C,IAAI,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;AACjC,QAAQ,OAAO,mBAAmB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACjD,KAAK;AACL,SAAS;AACT,QAAQ,OAAO,KAAK,CAAC;AACrB,KAAK;AACL,CAAC;;ACJD;AACA;AACA;AACA;AACA,AAAO,SAAS,YAAY,CAAC,OAAO,EAAE;AACtC,IAAI,OAAO;AACX,QAAQ,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE;AAC9D,YAAY,QAAQ,EAAE,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC;AAClD,SAAS,CAAC;AACV,KAAK,CAAC;AACN,CAAC;AACD,YAAY,CAAC,OAAO,GAAG,OAAO,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/plugin-paginate-rest/package.json b/node_modules/@octokit/plugin-paginate-rest/package.json index a9748957..011a252a 100644 --- a/node_modules/@octokit/plugin-paginate-rest/package.json +++ b/node_modules/@octokit/plugin-paginate-rest/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/plugin-paginate-rest", "description": "Octokit plugin to paginate REST API endpoint responses", - "version": "2.13.3", + "version": "2.13.5", "license": "MIT", "files": [ "dist-*/", @@ -17,14 +17,14 @@ ], "repository": "github:octokit/plugin-paginate-rest.js", "dependencies": { - "@octokit/types": "^6.11.0" + "@octokit/types": "^6.13.0" }, "peerDependencies": { "@octokit/core": ">=2" }, "devDependencies": { "@octokit/core": "^3.0.0", - "@octokit/plugin-rest-endpoint-methods": "^4.0.0", + "@octokit/plugin-rest-endpoint-methods": "^5.0.0", "@pika/pack": "^0.5.0", "@pika/plugin-build-node": "^0.9.0", "@pika/plugin-build-web": "^0.9.0", @@ -33,12 +33,12 @@ "@types/jest": "^26.0.0", "@types/node": "^14.0.4", "fetch-mock": "^9.0.0", - "jest": "^26.0.1", + "jest": "^27.0.0", "npm-run-all": "^4.1.5", - "prettier": "^2.0.4", + "prettier": "2.3.1", "semantic-release": "^17.0.0", "semantic-release-plugin-update-version-in-files": "^1.0.0", - "ts-jest": "^26.0.0", + "ts-jest": "^27.0.0-next.12", "typescript": "^4.0.2" }, "publishConfig": { diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/README.md b/node_modules/@octokit/plugin-rest-endpoint-methods/README.md index f4a7bbd8..8a17a79a 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/README.md +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/README.md @@ -59,8 +59,10 @@ Example ```ts import { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods"; -type UpdateLabelParameters = RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]; -type UpdateLabelResponse = RestEndpointMethodTypes["issues"]["updateLabel"]["response"]; +type UpdateLabelParameters = + RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]; +type UpdateLabelResponse = + RestEndpointMethodTypes["issues"]["updateLabel"]["response"]; ``` In order to get types beyond parameters and responses, check out 
[`@octokit/openapi-types`](https://github.com/octokit/openapi-types.ts/#readme), which is a direct transpliation from GitHub's official OpenAPI specification. diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js index 5a5a33d7..209196ee 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js @@ -2,29 +2,18 @@ Object.defineProperty(exports, '__esModule', { value: true }); -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); - if (enumerableOnly) symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + keys.push.apply(keys, symbols); } @@ -51,9 +40,25 @@ function _objectSpread2(target) { return target; } +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + const Endpoints = { actions: { addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], @@ -167,6 +172,11 @@ const Endpoints = { previews: ["corsair"] } }], + createContentAttachmentForRepo: ["POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], createFromManifest: ["POST /app-manifests/{code}/conversions"], createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], @@ -229,8 +239,11 @@ const Endpoints = { }], getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { + renamed: ["codeScanning", "listAlertInstances"] + }], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] @@ -712,6 +725,11 @@ const Endpoints = { previews: ["squirrel-girl"] } }], + createForRelease: ["POST 
/repos/{owner}/{repo}/releases/{release_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { mediaType: { previews: ["squirrel-girl"] @@ -812,6 +830,7 @@ const Endpoints = { } }], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { mediaType: { @@ -1135,7 +1154,7 @@ const Endpoints = { } }; -const VERSION = "5.1.1"; +const VERSION = "5.3.1"; function endpointsToMethods(octokit, endpointsMap) { const newMethods = {}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map index 334fb164..fed4e4ea 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET 
/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n 
listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT 
/user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\n \"POST /content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: [\"corsair\"] } },\n ],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET 
/repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\n \"GET /codes_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getConductCode: [\n \"GET /codes_of_conduct/{key}\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getForRepo: [\n \"GET /repos/{owner}/{repo}/community/code_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET 
/gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET 
/repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n { mediaType: { previews: [\"mockingbird\"] } },\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getStatusForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForAuthenticatedUser: [\n \"GET /user/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForOrg: [\n \"GET 
/orgs/{org}/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE 
/user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\n \"PUT /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createCard: [\n \"POST /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createColumn: [\n \"POST /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForAuthenticatedUser: [\n \"POST /user/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForOrg: [\n \"POST /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForRepo: [\n \"POST /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n delete: [\n \"DELETE /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteCard: [\n \"DELETE /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteColumn: [\n \"DELETE /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n get: [\n \"GET /projects/{project_id}\",\n { 
mediaType: { previews: [\"inertia\"] } },\n ],\n getCard: [\n \"GET /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getColumn: [\n \"GET /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCards: [\n \"GET /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCollaborators: [\n \"GET /projects/{project_id}/collaborators\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listColumns: [\n \"GET /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForRepo: [\n \"GET /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForUser: [\n \"GET /users/{username}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveCard: [\n \"POST /projects/columns/cards/{card_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveColumn: [\n \"POST /projects/columns/{column_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n update: [\n \"PATCH /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateCard: [\n \"PATCH /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateColumn: [\n \"PATCH /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE 
/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n { mediaType: { previews: [\"lydian\"] } },\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteLegacy: [\n \"DELETE /reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n {\n deprecated: \"octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy\",\n },\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n 
listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\n \"POST /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n { mediaType: { previews: [\"baptiste\"] } },\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE 
/repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\n \"DELETE /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\n \"GET /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET 
/repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n 
listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\n \"PUT /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n 
updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", { mediaType: { previews: [\"cloak\"] } }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", { mediaType: { previews: [\"mercy\"] } }],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n 
updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"5.1.1\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return 
requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nexport function legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n"],"names":["Endpoints","actions","addSelectedRepoToOrgSecret","cancelWorkflowRun","createOrUpdateEnvironmentSecret","createOrUpdateOrgSecret","createOrUpdateRepoSecret","createRegistrationTokenForOrg","createRegistrationTokenForRepo","createRemoveTokenForOrg","createRemoveTokenForRepo","createWorkflowDispatch","deleteArtifact","deleteEnvironmentSecret","deleteOrgSecret","deleteRepoSecret","deleteSelfHostedRunnerFromOrg","deleteSelfHostedRunnerFromRepo","deleteWorkflowRun","deleteWorkflowRunLogs","disableSelectedRepositoryGithubActionsOrganization","disableWorkflow","downloadArtifact","downloadJobLogsForWorkflowRun","downloadWorkflowRunLogs","enableSelectedRepositoryGithubActionsOrganization","enableWorkflow","getAllowedActionsOrganization","getAllowedActionsRepository","getArtifact","getEnvironmentPublicKey","getEnvironmentSecret","getGithubActionsPermissionsOrganization","getGithubActionsPermissionsRepository","getJobForWorkflowRun","getOrgPublicKey","getOrgSecret","getPendingDeploymentsForRun","getRepoPermissions","renamed","getRepoPublicKey","getRepoSecret","getReviewsForRun","getSelfHostedRunnerForOrg","getSelfHostedRunnerForRepo","getWorkflow","getWorkflowRun","getWorkflowRunUsage","getWorkflowUsage","listArtifactsForRepo","listEnvironmentSecrets","listJobsForWorkflowRun","listOrgSecrets","listRepoSecrets","listRepoWorkflows","listRunnerApplicationsForOrg","listRunnerApplicationsForRepo","listSelectedReposForOrgSecret","listSelectedRepositoriesEnabledGithubActionsOrganization","listSelfHostedRunnersForOrg","listSelfHostedRunnersForRepo","listWorkflowRunArtifacts","listWorkflowRuns","listWorkflowRunsForRepo","reRunWorkflow","removeSelectedRepoFromOrgSecret","reviewPendingDeploymentsForRun","setAllowedActionsOrganization","setAllowedActionsRepository","setGithubActionsPermissionsOrganization","setGithubActionsPermissionsRepository","setSelectedReposForOrgSecret","setSelectedRepositoriesEnabledGithubActionsOrganization","activity","checkRepoIsStarredB
yAuthenticatedUser","deleteRepoSubscription","deleteThreadSubscription","getFeeds","getRepoSubscription","getThread","getThreadSubscriptionForAuthenticatedUser","listEventsForAuthenticatedUser","listNotificationsForAuthenticatedUser","listOrgEventsForAuthenticatedUser","listPublicEvents","listPublicEventsForRepoNetwork","listPublicEventsForUser","listPublicOrgEvents","listReceivedEventsForUser","listReceivedPublicEventsForUser","listRepoEvents","listRepoNotificationsForAuthenticatedUser","listReposStarredByAuthenticatedUser","listReposStarredByUser","listReposWatchedByUser","listStargazersForRepo","listWatchedReposForAuthenticatedUser","listWatchersForRepo","markNotificationsAsRead","markRepoNotificationsAsRead","markThreadAsRead","setRepoSubscription","setThreadSubscription","starRepoForAuthenticatedUser","unstarRepoForAuthenticatedUser","apps","addRepoToInstallation","checkToken","createContentAttachment","mediaType","previews","createFromManifest","createInstallationAccessToken","deleteAuthorization","deleteInstallation","deleteToken","getAuthenticated","getBySlug","getInstallation","getOrgInstallation","getRepoInstallation","getSubscriptionPlanForAccount","getSubscriptionPlanForAccountStubbed","getUserInstallation","getWebhookConfigForApp","listAccountsForPlan","listAccountsForPlanStubbed","listInstallationReposForAuthenticatedUser","listInstallations","listInstallationsForAuthenticatedUser","listPlans","listPlansStubbed","listReposAccessibleToInstallation","listSubscriptionsForAuthenticatedUser","listSubscriptionsForAuthenticatedUserStubbed","removeRepoFromInstallation","resetToken","revokeInstallationAccessToken","scopeToken","suspendInstallation","unsuspendInstallation","updateWebhookConfigForApp","billing","getGithubActionsBillingOrg","getGithubActionsBillingUser","getGithubPackagesBillingOrg","getGithubPackagesBillingUser","getSharedStorageBillingOrg","getSharedStorageBillingUser","checks","create","createSuite","get","getSuite","listAnnotations","listForRef","listForSuite","listSuitesForRef","rerequestSuite","setSuitesPreferences","update","codeScanning","deleteAnalysis","getAlert","renamedParameters","alert_id","getAnalysis","getSarif","listAlertsForRepo","listAlertsInstances","listRecentAnalyses","updateAlert","uploadSarif","codesOfConduct","getAllCodesOfConduct","getConductCode","getForRepo","emojis","enterpriseAdmin","disableSelectedOrganizationGithubActionsEnterprise","enableSelectedOrganizationGithubActionsEnterprise","getAllowedActionsEnterprise","getGithubActionsPermissionsEnterprise","listSelectedOrganizationsEnabledGithubActionsEnterprise","setAllowedActionsEnterprise","setGithubActionsPermissionsEnterprise","setSelectedOrganizationsEnabledGithubActionsEnterprise","gists","checkIsStarred","createComment","delete","deleteComment","fork","getComment","getRevision","list","listComments","listCommits","listForUser","listForks","listPublic","listStarred","star","unstar","updateComment","git","createBlob","createCommit","createRef","createTag","createTree","deleteRef","getBlob","getCommit","getRef","getTag","getTree","listMatchingRefs","updateRef","gitignore","getAllTemplates","getTemplate","interactions","getRestrictionsForAuthenticatedUser","getRestrictionsForOrg","getRestrictionsForRepo","getRestrictionsForYourPublicRepos","removeRestrictionsForAuthenticatedUser","removeRestrictionsForOrg","removeRestrictionsForRepo","removeRestrictionsForYourPublicRepos","setRestrictionsForAuthenticatedUser","setRestrictionsForOrg","setRestrictionsForRepo","setRestrictionsForYourPublicRepo
s","issues","addAssignees","addLabels","checkUserCanBeAssigned","createLabel","createMilestone","deleteLabel","deleteMilestone","getEvent","getLabel","getMilestone","listAssignees","listCommentsForRepo","listEvents","listEventsForRepo","listEventsForTimeline","listForAuthenticatedUser","listForOrg","listForRepo","listLabelsForMilestone","listLabelsForRepo","listLabelsOnIssue","listMilestones","lock","removeAllLabels","removeAssignees","removeLabel","setLabels","unlock","updateLabel","updateMilestone","licenses","getAllCommonlyUsed","markdown","render","renderRaw","headers","meta","getOctocat","getZen","root","migrations","cancelImport","deleteArchiveForAuthenticatedUser","deleteArchiveForOrg","downloadArchiveForOrg","getArchiveForAuthenticatedUser","getCommitAuthors","getImportStatus","getLargeFiles","getStatusForAuthenticatedUser","getStatusForOrg","listReposForOrg","listReposForUser","mapCommitAuthor","setLfsPreference","startForAuthenticatedUser","startForOrg","startImport","unlockRepoForAuthenticatedUser","unlockRepoForOrg","updateImport","orgs","blockUser","cancelInvitation","checkBlockedUser","checkMembershipForUser","checkPublicMembershipForUser","convertMemberToOutsideCollaborator","createInvitation","createWebhook","deleteWebhook","getMembershipForAuthenticatedUser","getMembershipForUser","getWebhook","getWebhookConfigForOrg","listAppInstallations","listBlockedUsers","listFailedInvitations","listInvitationTeams","listMembers","listMembershipsForAuthenticatedUser","listOutsideCollaborators","listPendingInvitations","listPublicMembers","listWebhooks","pingWebhook","removeMember","removeMembershipForUser","removeOutsideCollaborator","removePublicMembershipForAuthenticatedUser","setMembershipForUser","setPublicMembershipForAuthenticatedUser","unblockUser","updateMembershipForAuthenticatedUser","updateWebhook","updateWebhookConfigForOrg","packages","deletePackageForAuthenticatedUser","deletePackageForOrg","deletePackageVersionForAuthenticatedUser","deletePackageVersionForOrg","getAllPackageVersionsForAPackageOwnedByAnOrg","getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser","getAllPackageVersionsForPackageOwnedByAuthenticatedUser","getAllPackageVersionsForPackageOwnedByOrg","getAllPackageVersionsForPackageOwnedByUser","getPackageForAuthenticatedUser","getPackageForOrganization","getPackageForUser","getPackageVersionForAuthenticatedUser","getPackageVersionForOrganization","getPackageVersionForUser","restorePackageForAuthenticatedUser","restorePackageForOrg","restorePackageVersionForAuthenticatedUser","restorePackageVersionForOrg","projects","addCollaborator","createCard","createColumn","createForAuthenticatedUser","createForOrg","createForRepo","deleteCard","deleteColumn","getCard","getColumn","getPermissionForUser","listCards","listCollaborators","listColumns","moveCard","moveColumn","removeCollaborator","updateCard","updateColumn","pulls","checkIfMerged","createReplyForReviewComment","createReview","createReviewComment","deletePendingReview","deleteReviewComment","dismissReview","getReview","getReviewComment","listCommentsForReview","listFiles","listRequestedReviewers","listReviewComments","listReviewCommentsForRepo","listReviews","merge","removeRequestedReviewers","requestReviewers","submitReview","updateBranch","updateReview","updateReviewComment","rateLimit","reactions","createForCommitComment","createForIssue","createForIssueComment","createForPullRequestReviewComment","createForTeamDiscussionCommentInOrg","createForTeamDiscussionInOrg","deleteForCommitComment","deleteForI
ssue","deleteForIssueComment","deleteForPullRequestComment","deleteForTeamDiscussion","deleteForTeamDiscussionComment","deleteLegacy","deprecated","listForCommitComment","listForIssue","listForIssueComment","listForPullRequestReviewComment","listForTeamDiscussionCommentInOrg","listForTeamDiscussionInOrg","repos","acceptInvitation","addAppAccessRestrictions","mapToData","addStatusCheckContexts","addTeamAccessRestrictions","addUserAccessRestrictions","checkCollaborator","checkVulnerabilityAlerts","compareCommits","createCommitComment","createCommitSignatureProtection","createCommitStatus","createDeployKey","createDeployment","createDeploymentStatus","createDispatchEvent","createFork","createInOrg","createOrUpdateEnvironment","createOrUpdateFileContents","createPagesSite","createRelease","createUsingTemplate","declineInvitation","deleteAccessRestrictions","deleteAdminBranchProtection","deleteAnEnvironment","deleteBranchProtection","deleteCommitComment","deleteCommitSignatureProtection","deleteDeployKey","deleteDeployment","deleteFile","deleteInvitation","deletePagesSite","deletePullRequestReviewProtection","deleteRelease","deleteReleaseAsset","disableAutomatedSecurityFixes","disableVulnerabilityAlerts","downloadArchive","downloadTarballArchive","downloadZipballArchive","enableAutomatedSecurityFixes","enableVulnerabilityAlerts","getAccessRestrictions","getAdminBranchProtection","getAllEnvironments","getAllStatusCheckContexts","getAllTopics","getAppsWithAccessToProtectedBranch","getBranch","getBranchProtection","getClones","getCodeFrequencyStats","getCollaboratorPermissionLevel","getCombinedStatusForRef","getCommitActivityStats","getCommitComment","getCommitSignatureProtection","getCommunityProfileMetrics","getContent","getContributorsStats","getDeployKey","getDeployment","getDeploymentStatus","getEnvironment","getLatestPagesBuild","getLatestRelease","getPages","getPagesBuild","getPagesHealthCheck","getParticipationStats","getPullRequestReviewProtection","getPunchCardStats","getReadme","getReadmeInDirectory","getRelease","getReleaseAsset","getReleaseByTag","getStatusChecksProtection","getTeamsWithAccessToProtectedBranch","getTopPaths","getTopReferrers","getUsersWithAccessToProtectedBranch","getViews","getWebhookConfigForRepo","listBranches","listBranchesForHeadCommit","listCommentsForCommit","listCommitCommentsForRepo","listCommitStatusesForRef","listContributors","listDeployKeys","listDeploymentStatuses","listDeployments","listInvitations","listInvitationsForAuthenticatedUser","listLanguages","listPagesBuilds","listPullRequestsAssociatedWithCommit","listReleaseAssets","listReleases","listTags","listTeams","removeAppAccessRestrictions","removeStatusCheckContexts","removeStatusCheckProtection","removeTeamAccessRestrictions","removeUserAccessRestrictions","renameBranch","replaceAllTopics","requestPagesBuild","setAdminBranchProtection","setAppAccessRestrictions","setStatusCheckContexts","setTeamAccessRestrictions","setUserAccessRestrictions","testPushWebhook","transfer","updateBranchProtection","updateCommitComment","updateInformationAboutPagesSite","updateInvitation","updatePullRequestReviewProtection","updateRelease","updateReleaseAsset","updateStatusCheckPotection","updateStatusCheckProtection","updateWebhookConfigForRepo","uploadReleaseAsset","baseUrl","search","code","commits","issuesAndPullRequests","labels","topics","users","secretScanning","teams","addOrUpdateMembershipForUserInOrg","addOrUpdateProjectPermissionsInOrg","addOrUpdateRepoPermissionsInOrg","checkPermissionsForProjectInOrg","che
ckPermissionsForRepoInOrg","createDiscussionCommentInOrg","createDiscussionInOrg","deleteDiscussionCommentInOrg","deleteDiscussionInOrg","deleteInOrg","getByName","getDiscussionCommentInOrg","getDiscussionInOrg","getMembershipForUserInOrg","listChildInOrg","listDiscussionCommentsInOrg","listDiscussionsInOrg","listMembersInOrg","listPendingInvitationsInOrg","listProjectsInOrg","listReposInOrg","removeMembershipForUserInOrg","removeProjectInOrg","removeRepoInOrg","updateDiscussionCommentInOrg","updateDiscussionInOrg","updateInOrg","addEmailForAuthenticated","block","checkBlocked","checkFollowingForUser","checkPersonIsFollowedByAuthenticated","createGpgKeyForAuthenticated","createPublicSshKeyForAuthenticated","deleteEmailForAuthenticated","deleteGpgKeyForAuthenticated","deletePublicSshKeyForAuthenticated","follow","getByUsername","getContextForUser","getGpgKeyForAuthenticated","getPublicSshKeyForAuthenticated","listBlockedByAuthenticated","listEmailsForAuthenticated","listFollowedByAuthenticated","listFollowersForAuthenticatedUser","listFollowersForUser","listFollowingForUser","listGpgKeysForAuthenticated","listGpgKeysForUser","listPublicEmailsForAuthenticated","listPublicKeysForUser","listPublicSshKeysForAuthenticated","setPrimaryEmailVisibilityForAuthenticated","unblock","unfollow","updateAuthenticated","VERSION","endpointsToMethods","octokit","endpointsMap","newMethods","scope","endpoints","Object","entries","methodName","endpoint","route","defaults","decorations","method","url","split","endpointDefaults","assign","scopeMethods","decorate","request","requestWithDefaults","withDecorations","args","options","data","undefined","newScope","newMethodName","log","warn","name","alias","restEndpointMethods","api","ENDPOINTS","rest","legacyRestEndpointMethods"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,MAAMA,SAAS,GAAG;AACdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,0BAA0B,EAAE,CACxB,4EADwB,CADvB;AAILC,IAAAA,iBAAiB,EAAE,CACf,yDADe,CAJd;AAOLC,IAAAA,+BAA+B,EAAE,CAC7B,yFAD6B,CAP5B;AAULC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CAVpB;AAWLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CAXrB;AAcLC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CAd1B;AAiBLC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CAjB3B;AAoBLC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CApBpB;AAqBLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CArBrB;AAwBLC,IAAAA,sBAAsB,EAAE,CACpB,uEADoB,CAxBnB;AA2BLC,IAAAA,cAAc,EAAE,CACZ,8DADY,CA3BX;AA8BLC,IAAAA,uBAAuB,EAAE,CACrB,4FADqB,CA9BpB;AAiCLC,IAAAA,eAAe,EAAE,CAAC,kDAAD,CAjCZ;AAkCLC,IAAAA,gBAAgB,EAAE,CACd,4DADc,CAlCb;AAqCLC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CArC1B;AAwCLC,IAAAA,8BAA8B,EAAE,CAC5B,0DAD4B,CAxC3B;AA2CLC,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CA3Cd;AA4CLC,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CA5ClB;AA+CLC,IAAAA,kDAAkD,EAAE,CAChD,qEADgD,CA/C/C;AAkDLC,IAAAA,eAAe,EAAE,CACb,mEADa,CAlDZ;AAqDLC,IAAAA,gBAAgB,EAAE,CACd,4EADc,CArDb;AAwDLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CAxD1B;AA2DLC,IAAAA,uBAAuB,EAAE,CACrB,sDADqB,CA3DpB;AA8DLC,IAAAA,iDAAiD,EAAE,CAC/C,kEAD+C,CA9D9C;AAiELC,IAAAA,cAAc,EAAE,CACZ,kEADY,CAjEX;AAoELC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CApE1B;AAuELC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CAvExB;AA0ELC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CA1ER;AA2ELC,IAAAA,uBAAuB,EAAE,CACrB,sFADqB,CA3EpB;AA8ELC,IAAAA,oBAAoB,EAAE,CAClB,yFADkB,CA9EjB;AAiFLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CAjFpC;AAoFLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CApFlC;AAuFLC,IAAAA,oBAAoB,EAAE,CAAC,iDAAD,CAvFjB;AAwFLC,IAAAA,eAAe,EAAE,CAAC,4CAAD,CAxFZ;AAyFLC,IAAAA,YAAY,EAAE,CAAC,+CAAD,CAzFT;AA0FLC,IAAAA,2BAA2B,EAAE,CACzB,qEADyB,CA1FxB;AA6FLC,IAAAA,kBAAkB,EAAE,CAChB,+CADgB,EAEhB,EAFgB,EAGhB;AAAEC,MAAAA,OAAO,EAAE,CAAC,SAAD,EAAY,uCAAZ;AAAX,KAHgB
,CA7Ff;AAkGLC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CAlGb;AAmGLC,IAAAA,aAAa,EAAE,CAAC,yDAAD,CAnGV;AAoGLC,IAAAA,gBAAgB,EAAE,CACd,2DADc,CApGb;AAuGLC,IAAAA,yBAAyB,EAAE,CAAC,6CAAD,CAvGtB;AAwGLC,IAAAA,0BAA0B,EAAE,CACxB,uDADwB,CAxGvB;AA2GLC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CA3GR;AA4GLC,IAAAA,cAAc,EAAE,CAAC,iDAAD,CA5GX;AA6GLC,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CA7GhB;AAgHLC,IAAAA,gBAAgB,EAAE,CACd,kEADc,CAhHb;AAmHLC,IAAAA,oBAAoB,EAAE,CAAC,6CAAD,CAnHjB;AAoHLC,IAAAA,sBAAsB,EAAE,CACpB,2EADoB,CApHnB;AAuHLC,IAAAA,sBAAsB,EAAE,CACpB,sDADoB,CAvHnB;AA0HLC,IAAAA,cAAc,EAAE,CAAC,iCAAD,CA1HX;AA2HLC,IAAAA,eAAe,EAAE,CAAC,2CAAD,CA3HZ;AA4HLC,IAAAA,iBAAiB,EAAE,CAAC,6CAAD,CA5Hd;AA6HLC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CA7HzB;AA8HLC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CA9H1B;AAiILC,IAAAA,6BAA6B,EAAE,CAC3B,4DAD2B,CAjI1B;AAoILC,IAAAA,wDAAwD,EAAE,CACtD,kDADsD,CApIrD;AAuILC,IAAAA,2BAA2B,EAAE,CAAC,iCAAD,CAvIxB;AAwILC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAxIzB;AAyILC,IAAAA,wBAAwB,EAAE,CACtB,2DADsB,CAzIrB;AA4ILC,IAAAA,gBAAgB,EAAE,CACd,gEADc,CA5Ib;AA+ILC,IAAAA,uBAAuB,EAAE,CAAC,wCAAD,CA/IpB;AAgJLC,IAAAA,aAAa,EAAE,CAAC,wDAAD,CAhJV;AAiJLC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,CAjJ5B;AAoJLC,IAAAA,8BAA8B,EAAE,CAC5B,sEAD4B,CApJ3B;AAuJLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CAvJ1B;AA0JLC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CA1JxB;AA6JLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CA7JpC;AAgKLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CAhKlC;AAmKLC,IAAAA,4BAA4B,EAAE,CAC1B,4DAD0B,CAnKzB;AAsKLC,IAAAA,uDAAuD,EAAE,CACrD,kDADqD;AAtKpD,GADK;AA2KdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,qCAAqC,EAAE,CAAC,kCAAD,CADjC;AAENC,IAAAA,sBAAsB,EAAE,CAAC,2CAAD,CAFlB;AAGNC,IAAAA,wBAAwB,EAAE,CACtB,wDADsB,CAHpB;AAMNC,IAAAA,QAAQ,EAAE,CAAC,YAAD,CANJ;AAONC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAPf;AAQNC,IAAAA,SAAS,EAAE,CAAC,wCAAD,CARL;AASNC,IAAAA,yCAAyC,EAAE,CACvC,qDADuC,CATrC;AAYNC,IAAAA,8BAA8B,EAAE,CAAC,8BAAD,CAZ1B;AAaNC,IAAAA,qCAAqC,EAAE,CAAC,oBAAD,CAbjC;AAcNC,IAAAA,iCAAiC,EAAE,CAC/B,yCAD+B,CAd7B;AAiBNC,IAAAA,gBAAgB,EAAE,CAAC,aAAD,CAjBZ;AAkBNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD,CAlB1B;AAmBNC,IAAAA,uBAAuB,EAAE,CAAC,qCAAD,CAnBnB;AAoBNC,IAAAA,mBAAmB,EAAE,CAAC,wBAAD,CApBf;AAqBNC,IAAAA,yBAAyB,EAAE,CAAC,uCAAD,CArBrB;AAsBNC,IAAAA,+BAA+B,EAAE,CAC7B,8CAD6B,CAtB3B;AAyBNC,IAAAA,cAAc,EAAE,CAAC,kCAAD,CAzBV;AA0BNC,IAAAA,yCAAyC,EAAE,CACvC,yCADuC,CA1BrC;AA6BNC,IAAAA,mCAAmC,EAAE,CAAC,mBAAD,CA7B/B;AA8BNC,IAAAA,sBAAsB,EAAE,CAAC,+BAAD,CA9BlB;AA+BNC,IAAAA,sBAAsB,EAAE,CAAC,qCAAD,CA/BlB;AAgCNC,IAAAA,qBAAqB,EAAE,CAAC,sCAAD,CAhCjB;AAiCNC,IAAAA,oCAAoC,EAAE,CAAC,yBAAD,CAjChC;AAkCNC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CAlCf;AAmCNC,IAAAA,uBAAuB,EAAE,CAAC,oBAAD,CAnCnB;AAoCNC,IAAAA,2BAA2B,EAAE,CAAC,yCAAD,CApCvB;AAqCNC,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CArCZ;AAsCNC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAtCf;AAuCNC,IAAAA,qBAAqB,EAAE,CACnB,qDADmB,CAvCjB;AA0CNC,IAAAA,4BAA4B,EAAE,CAAC,kCAAD,CA1CxB;AA2CNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD;AA3C1B,GA3KI;AAwNdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,qBAAqB,EAAE,CACnB,wEADmB,CADrB;AAIFC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CAJV;AAKFC,IAAAA,uBAAuB,EAAE,CACrB,6DADqB,EAErB;AAAEC,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFqB,CALvB;AASFC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CATlB;AAUFC,IAAAA,6BAA6B,EAAE,CAC3B,yDAD2B,CAV7B;AAaFC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAbnB;AAcFC,IAAAA,kBAAkB,EAAE,CAAC,6CAAD,CAdlB;AAeFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CAfX;AAgBFC,IAAAA,gBAAgB,EAAE,CAAC,UAAD,CAhBhB;AAiBFC,IAAAA,SAAS,EAAE,CAAC,sBAAD,CAjBT;AAkBFC,IAAAA,eAAe,EAAE,CAAC,0CAAD,CAlBf;AAmBFC,IAAAA,kBAAkB,EAAE,CAAC,8BAAD,CAnBlB;AAoBFC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CApBnB;AAqBFC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CArB7B;AAwBFC,IAAAA,oCAAoC,EAAE,CAClC,wDADkC,CAxBpC;AA2BFC,IAAAA,mBAAmB,EAAE,CAAC,oCAAD,CA3BnB;AA4BFC,IAA
AA,sBAAsB,EAAE,CAAC,sBAAD,CA5BtB;AA6BFC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CA7BnB;AA8BFC,IAAAA,0BAA0B,EAAE,CACxB,2DADwB,CA9B1B;AAiCFC,IAAAA,yCAAyC,EAAE,CACvC,wDADuC,CAjCzC;AAoCFC,IAAAA,iBAAiB,EAAE,CAAC,wBAAD,CApCjB;AAqCFC,IAAAA,qCAAqC,EAAE,CAAC,yBAAD,CArCrC;AAsCFC,IAAAA,SAAS,EAAE,CAAC,gCAAD,CAtCT;AAuCFC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAvChB;AAwCFC,IAAAA,iCAAiC,EAAE,CAAC,gCAAD,CAxCjC;AAyCFC,IAAAA,qCAAqC,EAAE,CAAC,iCAAD,CAzCrC;AA0CFC,IAAAA,4CAA4C,EAAE,CAC1C,yCAD0C,CA1C5C;AA6CFC,IAAAA,0BAA0B,EAAE,CACxB,2EADwB,CA7C1B;AAgDFC,IAAAA,UAAU,EAAE,CAAC,uCAAD,CAhDV;AAiDFC,IAAAA,6BAA6B,EAAE,CAAC,4BAAD,CAjD7B;AAkDFC,IAAAA,UAAU,EAAE,CAAC,6CAAD,CAlDV;AAmDFC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CAnDnB;AAoDFC,IAAAA,qBAAqB,EAAE,CACnB,uDADmB,CApDrB;AAuDFC,IAAAA,yBAAyB,EAAE,CAAC,wBAAD;AAvDzB,GAxNQ;AAiRdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,0BAA0B,EAAE,CAAC,0CAAD,CADvB;AAELC,IAAAA,2BAA2B,EAAE,CACzB,gDADyB,CAFxB;AAKLC,IAAAA,2BAA2B,EAAE,CAAC,2CAAD,CALxB;AAMLC,IAAAA,4BAA4B,EAAE,CAC1B,iDAD0B,CANzB;AASLC,IAAAA,0BAA0B,EAAE,CACxB,iDADwB,CATvB;AAYLC,IAAAA,2BAA2B,EAAE,CACzB,uDADyB;AAZxB,GAjRK;AAiSdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,MAAM,EAAE,CAAC,uCAAD,CADJ;AAEJC,IAAAA,WAAW,EAAE,CAAC,yCAAD,CAFT;AAGJC,IAAAA,GAAG,EAAE,CAAC,qDAAD,CAHD;AAIJC,IAAAA,QAAQ,EAAE,CAAC,yDAAD,CAJN;AAKJC,IAAAA,eAAe,EAAE,CACb,iEADa,CALb;AAQJC,IAAAA,UAAU,EAAE,CAAC,oDAAD,CARR;AASJC,IAAAA,YAAY,EAAE,CACV,oEADU,CATV;AAYJC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CAZd;AAaJC,IAAAA,cAAc,EAAE,CACZ,oEADY,CAbZ;AAgBJC,IAAAA,oBAAoB,EAAE,CAClB,sDADkB,CAhBlB;AAmBJC,IAAAA,MAAM,EAAE,CAAC,uDAAD;AAnBJ,GAjSM;AAsTdC,EAAAA,YAAY,EAAE;AACVC,IAAAA,cAAc,EAAE,CACZ,oFADY,CADN;AAIVC,IAAAA,QAAQ,EAAE,CACN,+DADM,EAEN,EAFM,EAGN;AAAEC,MAAAA,iBAAiB,EAAE;AAAEC,QAAAA,QAAQ,EAAE;AAAZ;AAArB,KAHM,CAJA;AASVC,IAAAA,WAAW,EAAE,CACT,gEADS,CATH;AAYVC,IAAAA,QAAQ,EAAE,CAAC,2DAAD,CAZA;AAaVC,IAAAA,iBAAiB,EAAE,CAAC,gDAAD,CAbT;AAcVC,IAAAA,mBAAmB,EAAE,CACjB,yEADiB,CAdX;AAiBVC,IAAAA,kBAAkB,EAAE,CAAC,kDAAD,CAjBV;AAkBVC,IAAAA,WAAW,EAAE,CACT,iEADS,CAlBH;AAqBVC,IAAAA,WAAW,EAAE,CAAC,iDAAD;AArBH,GAtTA;AA6UdC,EAAAA,cAAc,EAAE;AACZC,IAAAA,oBAAoB,EAAE,CAClB,uBADkB,EAElB;AAAEjE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFkB,CADV;AAKZiE,IAAAA,cAAc,EAAE,CACZ,6BADY,EAEZ;AAAElE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CALJ;AASZkE,IAAAA,UAAU,EAAE,CACR,qDADQ,EAER;AAAEnE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFQ;AATA,GA7UF;AA2VdmE,EAAAA,MAAM,EAAE;AAAEzB,IAAAA,GAAG,EAAE,CAAC,aAAD;AAAP,GA3VM;AA4Vd0B,EAAAA,eAAe,EAAE;AACbC,IAAAA,kDAAkD,EAAE,CAChD,6EADgD,CADvC;AAIbC,IAAAA,iDAAiD,EAAE,CAC/C,0EAD+C,CAJtC;AAObC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CAPhB;AAUbC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CAV1B;AAabC,IAAAA,uDAAuD,EAAE,CACrD,iEADqD,CAb5C;AAgBbC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CAhBhB;AAmBbC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CAnB1B;AAsBbC,IAAAA,sDAAsD,EAAE,CACpD,iEADoD;AAtB3C,GA5VH;AAsXdC,EAAAA,KAAK,EAAE;AACHC,IAAAA,cAAc,EAAE,CAAC,2BAAD,CADb;AAEHtC,IAAAA,MAAM,EAAE,CAAC,aAAD,CAFL;AAGHuC,IAAAA,aAAa,EAAE,CAAC,gCAAD,CAHZ;AAIHC,IAAAA,MAAM,EAAE,CAAC,yBAAD,CAJL;AAKHC,IAAAA,aAAa,EAAE,CAAC,+CAAD,CALZ;AAMHC,IAAAA,IAAI,EAAE,CAAC,6BAAD,CANH;AAOHxC,IAAAA,GAAG,EAAE,CAAC,sBAAD,CAPF;AAQHyC,IAAAA,UAAU,EAAE,CAAC,4CAAD,CART;AASHC,IAAAA,WAAW,EAAE,CAAC,4BAAD,CATV;AAUHC,IAAAA,IAAI,EAAE,CAAC,YAAD,CAVH;AAWHC,IAAAA,YAAY,EAAE,CAAC,+BAAD,CAXX;AAYHC,IAAAA,WAAW,EAAE,CAAC,8BAAD,CAZV;AAaHC,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAbV;AAcHC,IAAAA,SAAS,EAAE,CAAC,4BAAD,CAdR;AAeHC,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAfT;AAgBHC,IAAAA,WAAW,EAAE,CAAC,oBAAD,CAhBV;AAiBHC,IAAAA,IAAI,EAAE,CAAC,2BAAD,CAjBH;AAkBHC,IAAAA,MAAM,EAAE,CAAC,8BAAD,CAlBL;AAmBH3C,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAnBL;A
AoBH4C,IAAAA,aAAa,EAAE,CAAC,8CAAD;AApBZ,GAtXO;AA4YdC,EAAAA,GAAG,EAAE;AACDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CADX;AAEDC,IAAAA,YAAY,EAAE,CAAC,wCAAD,CAFb;AAGDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAHV;AAIDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAJV;AAKDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CALX;AAMDC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CANV;AAODC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAPR;AAQDC,IAAAA,SAAS,EAAE,CAAC,oDAAD,CARV;AASDC,IAAAA,MAAM,EAAE,CAAC,yCAAD,CATP;AAUDC,IAAAA,MAAM,EAAE,CAAC,8CAAD,CAVP;AAWDC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAXR;AAYDC,IAAAA,gBAAgB,EAAE,CAAC,mDAAD,CAZjB;AAaDC,IAAAA,SAAS,EAAE,CAAC,4CAAD;AAbV,GA5YS;AA2ZdC,EAAAA,SAAS,EAAE;AACPC,IAAAA,eAAe,EAAE,CAAC,0BAAD,CADV;AAEPC,IAAAA,WAAW,EAAE,CAAC,iCAAD;AAFN,GA3ZG;AA+ZdC,EAAAA,YAAY,EAAE;AACVC,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAD3B;AAEVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CAFb;AAGVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CAHd;AAIVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAE3L,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B,CAJzB;AASV4L,IAAAA,sCAAsC,EAAE,CAAC,iCAAD,CAT9B;AAUVC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CAVhB;AAWVC,IAAAA,yBAAyB,EAAE,CACvB,iDADuB,CAXjB;AAcVC,IAAAA,oCAAoC,EAAE,CAClC,iCADkC,EAElC,EAFkC,EAGlC;AAAE/L,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,wCAAjB;AAAX,KAHkC,CAd5B;AAmBVgM,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAnB3B;AAoBVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CApBb;AAqBVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CArBd;AAsBVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAEnM,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B;AAtBzB,GA/ZA;AA2bdoM,EAAAA,MAAM,EAAE;AACJC,IAAAA,YAAY,EAAE,CACV,4DADU,CADV;AAIJC,IAAAA,SAAS,EAAE,CAAC,yDAAD,CAJP;AAKJC,IAAAA,sBAAsB,EAAE,CAAC,gDAAD,CALpB;AAMJxF,IAAAA,MAAM,EAAE,CAAC,mCAAD,CANJ;AAOJuC,IAAAA,aAAa,EAAE,CACX,2DADW,CAPX;AAUJkD,IAAAA,WAAW,EAAE,CAAC,mCAAD,CAVT;AAWJC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CAXb;AAYJjD,IAAAA,aAAa,EAAE,CACX,2DADW,CAZX;AAeJkD,IAAAA,WAAW,EAAE,CAAC,4CAAD,CAfT;AAgBJC,IAAAA,eAAe,EAAE,CACb,4DADa,CAhBb;AAmBJ1F,IAAAA,GAAG,EAAE,CAAC,iDAAD,CAnBD;AAoBJyC,IAAAA,UAAU,EAAE,CAAC,wDAAD,CApBR;AAqBJkD,IAAAA,QAAQ,EAAE,CAAC,oDAAD,CArBN;AAsBJC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CAtBN;AAuBJC,IAAAA,YAAY,EAAE,CAAC,yDAAD,CAvBV;AAwBJlD,IAAAA,IAAI,EAAE,CAAC,aAAD,CAxBF;AAyBJmD,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAzBX;AA0BJlD,IAAAA,YAAY,EAAE,CAAC,0DAAD,CA1BV;AA2BJmD,IAAAA,mBAAmB,EAAE,CAAC,2CAAD,CA3BjB;AA4BJC,IAAAA,UAAU,EAAE,CAAC,wDAAD,CA5BR;AA6BJC,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA7Bf;AA8BJC,IAAAA,qBAAqB,EAAE,CACnB,0DADmB,EAEnB;AAAE7I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,aAAD;AAAZ;AAAb,KAFmB,CA9BnB;AAkCJ6I,IAAAA,wBAAwB,EAAE,CAAC,kBAAD,CAlCtB;AAmCJC,IAAAA,UAAU,EAAE,CAAC,wBAAD,CAnCR;AAoCJC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CApCT;AAqCJC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CArCpB;AAwCJC,IAAAA,iBAAiB,EAAE,CAAC,kCAAD,CAxCf;AAyCJC,IAAAA,iBAAiB,EAAE,CACf,wDADe,CAzCf;AA4CJC,IAAAA,cAAc,EAAE,CAAC,sCAAD,CA5CZ;AA6CJC,IAAAA,IAAI,EAAE,CAAC,sDAAD,CA7CF;AA8CJC,IAAAA,eAAe,EAAE,CACb,2DADa,CA9Cb;AAiDJC,IAAAA,eAAe,EAAE,CACb,8DADa,CAjDb;AAoDJC,IAAAA,WAAW,EAAE,CACT,kEADS,CApDT;AAuDJC,IAAAA,SAAS,EAAE,CAAC,wDAAD,CAvDP;AAwDJC,IAAAA,MAAM,EAAE,CAAC,yDAAD,CAxDJ;AAyDJvG,IAAAA,MAAM,EAAE,CAAC,mDAAD,CAzDJ;AA0DJ4C,IAAAA,aAAa,EAAE,CAAC,0DAAD,CA1DX;AA2DJ4D,IAAAA,WAAW,EAAE,CAAC,2CAAD,CA3DT;AA4DJC,IAAAA,eAAe,EAAE,CACb,2DADa;AA5Db,GA3bM;AA2fdC,EAAAA,QAAQ,EAAE;AACNlH,IAAAA,GAAG,EAAE,CAAC,yBAAD,CADC;AAENmH,IAAAA,kBAAkB,EAAE,CAAC,eAAD,CAFd;AAGN3F,IAAAA,UAAU,EAAE,CAAC,mCAAD;AAHN,GA3fI;AAggBd4F,EAAAA,QAAQ,EAAE;AACNC,IAAAA,MAAM,EAAE,CAAC,gBAAD,CADF;AAENC,IAAAA,SAAS,EAAE,CACP,oBADO,EAEP;AAAEC,MAAAA,OAAO,EAAE;AAAE,wBAAgB;AAAlB;AAAX,KAFO;AAFL,GAhgBI;AAugBdC,EAAAA,IAAI,EAAE;AACFxH,IAAAA,GAAG,EAAE,CAAC,WAAD,CADH;AAEFyH,IAAAA,UAAU,EAAE,CAAC,cAAD,CAFV;AAGFC,IAAAA,MAAM,EA
AE,CAAC,UAAD,CAHN;AAIFC,IAAAA,IAAI,EAAE,CAAC,OAAD;AAJJ,GAvgBQ;AA6gBdC,EAAAA,UAAU,EAAE;AACRC,IAAAA,YAAY,EAAE,CAAC,qCAAD,CADN;AAERC,IAAAA,iCAAiC,EAAE,CAC/B,gDAD+B,EAE/B;AAAEzK,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF+B,CAF3B;AAMRyK,IAAAA,mBAAmB,EAAE,CACjB,sDADiB,EAEjB;AAAE1K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFiB,CANb;AAUR0K,IAAAA,qBAAqB,EAAE,CACnB,mDADmB,EAEnB;AAAE3K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFmB,CAVf;AAcR2K,IAAAA,8BAA8B,EAAE,CAC5B,6CAD4B,EAE5B;AAAE5K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF4B,CAdxB;AAkBR4K,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CAlBV;AAmBRC,IAAAA,eAAe,EAAE,CAAC,kCAAD,CAnBT;AAoBRC,IAAAA,aAAa,EAAE,CAAC,8CAAD,CApBP;AAqBRC,IAAAA,6BAA6B,EAAE,CAC3B,qCAD2B,EAE3B;AAAEhL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF2B,CArBvB;AAyBRgL,IAAAA,eAAe,EAAE,CACb,2CADa,EAEb;AAAEjL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFa,CAzBT;AA6BR6I,IAAAA,wBAAwB,EAAE,CACtB,sBADsB,EAEtB;AAAE9I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFsB,CA7BlB;AAiCR8I,IAAAA,UAAU,EAAE,CACR,4BADQ,EAER;AAAE/I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFQ,CAjCJ;AAqCRiL,IAAAA,eAAe,EAAE,CACb,wDADa,EAEb;AAAElL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFa,CArCT;AAyCRkL,IAAAA,gBAAgB,EAAE,CACd,kDADc,EAEd;AAAEnL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFc,CAzCV;AA6CRmL,IAAAA,eAAe,EAAE,CAAC,wDAAD,CA7CT;AA8CRC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CA9CV;AA+CRC,IAAAA,yBAAyB,EAAE,CAAC,uBAAD,CA/CnB;AAgDRC,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAhDL;AAiDRC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CAjDL;AAkDRC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,EAE5B;AAAEzL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF4B,CAlDxB;AAsDRyL,IAAAA,gBAAgB,EAAE,CACd,qEADc,EAEd;AAAE1L,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFc,CAtDV;AA0DR0L,IAAAA,YAAY,EAAE,CAAC,oCAAD;AA1DN,GA7gBE;AAykBdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CADT;AAEFC,IAAAA,gBAAgB,EAAE,CAAC,gDAAD,CAFhB;AAGFC,IAAAA,gBAAgB,EAAE,CAAC,mCAAD,CAHhB;AAIFC,IAAAA,sBAAsB,EAAE,CAAC,oCAAD,CAJtB;AAKFC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAL5B;AAMFC,IAAAA,kCAAkC,EAAE,CAChC,kDADgC,CANlC;AASFC,IAAAA,gBAAgB,EAAE,CAAC,8BAAD,CAThB;AAUFC,IAAAA,aAAa,EAAE,CAAC,wBAAD,CAVb;AAWFC,IAAAA,aAAa,EAAE,CAAC,oCAAD,CAXb;AAYF1J,IAAAA,GAAG,EAAE,CAAC,iBAAD,CAZH;AAaF2J,IAAAA,iCAAiC,EAAE,CAAC,kCAAD,CAbjC;AAcFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAdpB;AAeFC,IAAAA,UAAU,EAAE,CAAC,iCAAD,CAfV;AAgBFC,IAAAA,sBAAsB,EAAE,CAAC,wCAAD,CAhBtB;AAiBFnH,IAAAA,IAAI,EAAE,CAAC,oBAAD,CAjBJ;AAkBFoH,IAAAA,oBAAoB,EAAE,CAAC,+BAAD,CAlBpB;AAmBFC,IAAAA,gBAAgB,EAAE,CAAC,wBAAD,CAnBhB;AAoBFC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CApBrB;AAqBF9D,IAAAA,wBAAwB,EAAE,CAAC,gBAAD,CArBxB;AAsBFrD,IAAAA,WAAW,EAAE,CAAC,4BAAD,CAtBX;AAuBFoH,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CAvBnB;AAwBFC,IAAAA,WAAW,EAAE,CAAC,yBAAD,CAxBX;AAyBFC,IAAAA,mCAAmC,EAAE,CAAC,4BAAD,CAzBnC;AA0BFC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CA1BxB;AA2BFC,IAAAA,sBAAsB,EAAE,CAAC,6BAAD,CA3BtB;AA4BFC,IAAAA,iBAAiB,EAAE,CAAC,gCAAD,CA5BjB;AA6BFC,IAAAA,YAAY,EAAE,CAAC,uBAAD,CA7BZ;AA8BFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CA9BX;AA+BFC,IAAAA,YAAY,EAAE,CAAC,uCAAD,CA/BZ;AAgCFC,IAAAA,uBAAuB,EAAE,CAAC,2CAAD,CAhCvB;AAiCFC,IAAAA,yBAAyB,EAAE,CACvB,qDADuB,CAjCzB;AAoCFC,IAAAA,0CAA0C,EAAE,CACxC,8CADwC,CApC1C;AAuCFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAvCpB;AAwCFC,IAAAA,uCAAuC,EAAE,CACrC,2CADqC,CAxCvC;AA2CFC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CA3CX;AA4CFxK,IAAAA,MAAM,EAAE,CAAC,mBAAD,CA5CN;AA6CFyK,IAAAA,oCAAoC,EAAE,CAClC,oCADkC,CA7CpC;AAgDFC,IAAAA,aAAa,EAAE,CAAC,mCAAD,CAhDb;AAiDFC,IAAAA,
yBAAyB,EAAE,CAAC,0CAAD;AAjDzB,GAzkBQ;AA4nBdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,iCAAiC,EAAE,CAC/B,qDAD+B,CAD7B;AAINC,IAAAA,mBAAmB,EAAE,CACjB,2DADiB,CAJf;AAONC,IAAAA,wCAAwC,EAAE,CACtC,mFADsC,CAPpC;AAUNC,IAAAA,0BAA0B,EAAE,CACxB,yFADwB,CAVtB;AAaNC,IAAAA,4CAA4C,EAAE,CAC1C,iEAD0C,EAE1C,EAF0C,EAG1C;AAAE1S,MAAAA,OAAO,EAAE,CAAC,UAAD,EAAa,2CAAb;AAAX,KAH0C,CAbxC;AAkBN2S,IAAAA,2DAA2D,EAAE,CACzD,2DADyD,EAEzD,EAFyD,EAGzD;AACI3S,MAAAA,OAAO,EAAE,CACL,UADK,EAEL,yDAFK;AADb,KAHyD,CAlBvD;AA4BN4S,IAAAA,uDAAuD,EAAE,CACrD,2DADqD,CA5BnD;AA+BNC,IAAAA,yCAAyC,EAAE,CACvC,iEADuC,CA/BrC;AAkCNC,IAAAA,0CAA0C,EAAE,CACxC,uEADwC,CAlCtC;AAqCNC,IAAAA,8BAA8B,EAAE,CAC5B,kDAD4B,CArC1B;AAwCNC,IAAAA,yBAAyB,EAAE,CACvB,wDADuB,CAxCrB;AA2CNC,IAAAA,iBAAiB,EAAE,CACf,8DADe,CA3Cb;AA8CNC,IAAAA,qCAAqC,EAAE,CACnC,gFADmC,CA9CjC;AAiDNC,IAAAA,gCAAgC,EAAE,CAC9B,sFAD8B,CAjD5B;AAoDNC,IAAAA,wBAAwB,EAAE,CACtB,4FADsB,CApDpB;AAuDNC,IAAAA,kCAAkC,EAAE,CAChC,mEADgC,CAvD9B;AA0DNC,IAAAA,oBAAoB,EAAE,CAClB,yEADkB,CA1DhB;AA6DNC,IAAAA,yCAAyC,EAAE,CACvC,yFADuC,CA7DrC;AAgENC,IAAAA,2BAA2B,EAAE,CACzB,+FADyB;AAhEvB,GA5nBI;AAgsBdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,eAAe,EAAE,CACb,qDADa,EAEb;AAAEpP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFa,CADX;AAKNoP,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAErP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CALN;AASNqP,IAAAA,YAAY,EAAE,CACV,qCADU,EAEV;AAAEtP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CATR;AAaNsP,IAAAA,0BAA0B,EAAE,CACxB,qBADwB,EAExB;AAAEvP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFwB,CAbtB;AAiBNuP,IAAAA,YAAY,EAAE,CACV,2BADU,EAEV;AAAExP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CAjBR;AAqBNwP,IAAAA,aAAa,EAAE,CACX,qCADW,EAEX;AAAEzP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFW,CArBT;AAyBNgF,IAAAA,MAAM,EAAE,CACJ,+BADI,EAEJ;AAAEjF,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFI,CAzBF;AA6BNyP,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAE1P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CA7BN;AAiCN0P,IAAAA,YAAY,EAAE,CACV,sCADU,EAEV;AAAE3P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CAjCR;AAqCN0C,IAAAA,GAAG,EAAE,CACD,4BADC,EAED;AAAE3C,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFC,CArCC;AAyCN2P,IAAAA,OAAO,EAAE,CACL,uCADK,EAEL;AAAE5P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFK,CAzCH;AA6CN4P,IAAAA,SAAS,EAAE,CACP,mCADO,EAEP;AAAE7P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFO,CA7CL;AAiDN6P,IAAAA,oBAAoB,EAAE,CAClB,gEADkB,EAElB;AAAE9P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFkB,CAjDhB;AAqDN8P,IAAAA,SAAS,EAAE,CACP,yCADO,EAEP;AAAE/P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFO,CArDL;AAyDN+P,IAAAA,iBAAiB,EAAE,CACf,0CADe,EAEf;AAAEhQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFe,CAzDb;AA6DNgQ,IAAAA,WAAW,EAAE,CACT,oCADS,EAET;AAAEjQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CA7DP;AAiEN8I,IAAAA,UAAU,EAAE,CACR,0BADQ,EAER;AAAE/I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CAjEN;AAqEN+I,IAAAA,WAAW,EAAE,CACT,oCADS,EAET;AAAEhJ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CArEP;AAyENwF,IAAAA,WAAW,EAAE,CACT,gCADS,EAET;AAAEzF,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CAzEP;AA6ENiQ,IAAAA,QAAQ,EAAE,CACN,8CADM,EAEN;AAAElQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFM,CA7EJ;AAiFNkQ,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAEnQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CAjFN;AAqFNmQ,IAAAA,kBAAkB,EAAE,CAChB
,wDADgB,EAEhB;AAAEpQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFgB,CArFd;AAyFNkD,IAAAA,MAAM,EAAE,CACJ,8BADI,EAEJ;AAAEnD,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFI,CAzFF;AA6FNoQ,IAAAA,UAAU,EAAE,CACR,yCADQ,EAER;AAAErQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CA7FN;AAiGNqQ,IAAAA,YAAY,EAAE,CACV,qCADU,EAEV;AAAEtQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU;AAjGR,GAhsBI;AAsyBdsQ,EAAAA,KAAK,EAAE;AACHC,IAAAA,aAAa,EAAE,CAAC,qDAAD,CADZ;AAEH/N,IAAAA,MAAM,EAAE,CAAC,kCAAD,CAFL;AAGHgO,IAAAA,2BAA2B,EAAE,CACzB,8EADyB,CAH1B;AAMHC,IAAAA,YAAY,EAAE,CAAC,wDAAD,CANX;AAOHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB,CAPlB;AAUHC,IAAAA,mBAAmB,EAAE,CACjB,sEADiB,CAVlB;AAaHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CAblB;AAgBHC,IAAAA,aAAa,EAAE,CACX,8EADW,CAhBZ;AAmBHnO,IAAAA,GAAG,EAAE,CAAC,+CAAD,CAnBF;AAoBHoO,IAAAA,SAAS,EAAE,CACP,mEADO,CApBR;AAuBHC,IAAAA,gBAAgB,EAAE,CAAC,uDAAD,CAvBf;AAwBH1L,IAAAA,IAAI,EAAE,CAAC,iCAAD,CAxBH;AAyBH2L,IAAAA,qBAAqB,EAAE,CACnB,4EADmB,CAzBpB;AA4BHzL,IAAAA,WAAW,EAAE,CAAC,uDAAD,CA5BV;AA6BH0L,IAAAA,SAAS,EAAE,CAAC,qDAAD,CA7BR;AA8BHC,IAAAA,sBAAsB,EAAE,CACpB,mEADoB,CA9BrB;AAiCHC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CAjCjB;AAoCHC,IAAAA,yBAAyB,EAAE,CAAC,0CAAD,CApCxB;AAqCHC,IAAAA,WAAW,EAAE,CAAC,uDAAD,CArCV;AAsCHC,IAAAA,KAAK,EAAE,CAAC,qDAAD,CAtCJ;AAuCHC,IAAAA,wBAAwB,EAAE,CACtB,sEADsB,CAvCvB;AA0CHC,IAAAA,gBAAgB,EAAE,CACd,oEADc,CA1Cf;AA6CHC,IAAAA,YAAY,EAAE,CACV,2EADU,CA7CX;AAgDHvO,IAAAA,MAAM,EAAE,CAAC,iDAAD,CAhDL;AAiDHwO,IAAAA,YAAY,EAAE,CACV,6DADU,EAEV;AAAE3R,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFU,CAjDX;AAqDH2R,IAAAA,YAAY,EAAE,CACV,mEADU,CArDX;AAwDHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB;AAxDlB,GAtyBO;AAk2BdC,EAAAA,SAAS,EAAE;AAAEnP,IAAAA,GAAG,EAAE,CAAC,iBAAD;AAAP,GAl2BG;AAm2BdoP,EAAAA,SAAS,EAAE;AACPC,IAAAA,sBAAsB,EAAE,CACpB,4DADoB,EAEpB;AAAEhS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFoB,CADjB;AAKPgS,IAAAA,cAAc,EAAE,CACZ,4DADY,EAEZ;AAAEjS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CALT;AASPiS,IAAAA,qBAAqB,EAAE,CACnB,mEADmB,EAEnB;AAAElS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFmB,CAThB;AAaPkS,IAAAA,iCAAiC,EAAE,CAC/B,kEAD+B,EAE/B;AAAEnS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF+B,CAb5B;AAiBPmS,IAAAA,mCAAmC,EAAE,CACjC,wGADiC,EAEjC;AAAEpS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFiC,CAjB9B;AAqBPoS,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B;AAAErS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF0B,CArBvB;AAyBPqS,IAAAA,sBAAsB,EAAE,CACpB,4EADoB,EAEpB;AAAEtS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFoB,CAzBjB;AA6BPsS,IAAAA,cAAc,EAAE,CACZ,4EADY,EAEZ;AAAEvS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CA7BT;AAiCPuS,IAAAA,qBAAqB,EAAE,CACnB,mFADmB,EAEnB;AAAExS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFmB,CAjChB;AAqCPwS,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,EAEzB;AAAEzS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFyB,CArCtB;AAyCPyS,IAAAA,uBAAuB,EAAE,CACrB,8FADqB,EAErB;AAAE1S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFqB,CAzClB;AA6CP0S,IAAAA,8BAA8B,EAAE,CAC5B,wHAD4B,EAE5B;AAAE3S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF4B,CA7CzB;AAiDP2S,IAAAA,YAAY,EAAE,CACV,iCADU,EAEV;AAAE5S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFU,EAGV;AACI4S,MAAAA,UAAU,EAAE;AADhB,KAHU,CAjDP;AAwDPC,IAAAA,oBAAoB,EAAE,CAClB,2DADkB,EAElB;AAAE9S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFkB,CAxDf;AA4DP8S,IAAAA,YAAY,EAAE,CACV,2DADU,EAEV;AAAE/S,MA
AAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFU,CA5DP;AAgEP+S,IAAAA,mBAAmB,EAAE,CACjB,kEADiB,EAEjB;AAAEhT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFiB,CAhEd;AAoEPgT,IAAAA,+BAA+B,EAAE,CAC7B,iEAD6B,EAE7B;AAAEjT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF6B,CApE1B;AAwEPiT,IAAAA,iCAAiC,EAAE,CAC/B,uGAD+B,EAE/B;AAAElT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF+B,CAxE5B;AA4EPkT,IAAAA,0BAA0B,EAAE,CACxB,6EADwB,EAExB;AAAEnT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFwB;AA5ErB,GAn2BG;AAo7BdmT,EAAAA,KAAK,EAAE;AACHC,IAAAA,gBAAgB,EAAE,CAAC,oDAAD,CADf;AAEHC,IAAAA,wBAAwB,EAAE,CACtB,2EADsB,EAEtB,EAFsB,EAGtB;AAAEC,MAAAA,SAAS,EAAE;AAAb,KAHsB,CAFvB;AAOHnE,IAAAA,eAAe,EAAE,CAAC,oDAAD,CAPd;AAQHoE,IAAAA,sBAAsB,EAAE,CACpB,yFADoB,EAEpB,EAFoB,EAGpB;AAAED,MAAAA,SAAS,EAAE;AAAb,KAHoB,CARrB;AAaHE,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEF,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAbxB;AAkBHG,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAlBxB;AAuBHI,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CAvBhB;AAwBHC,IAAAA,wBAAwB,EAAE,CACtB,gDADsB,EAEtB;AAAE5T,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFsB,CAxBvB;AA4BH4T,IAAAA,cAAc,EAAE,CAAC,mDAAD,CA5Bb;AA6BHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CA7BlB;AAgCHC,IAAAA,+BAA+B,EAAE,CAC7B,6EAD6B,EAE7B;AAAE/T,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF6B,CAhC9B;AAoCH+T,IAAAA,kBAAkB,EAAE,CAAC,2CAAD,CApCjB;AAqCHC,IAAAA,eAAe,EAAE,CAAC,iCAAD,CArCd;AAsCHC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAtCf;AAuCHC,IAAAA,sBAAsB,EAAE,CACpB,iEADoB,CAvCrB;AA0CHC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CA1ClB;AA2CH7E,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CA3CzB;AA4CH8E,IAAAA,UAAU,EAAE,CAAC,kCAAD,CA5CT;AA6CHC,IAAAA,WAAW,EAAE,CAAC,wBAAD,CA7CV;AA8CHC,IAAAA,yBAAyB,EAAE,CACvB,2DADuB,CA9CxB;AAiDHC,IAAAA,0BAA0B,EAAE,CAAC,2CAAD,CAjDzB;AAkDHC,IAAAA,eAAe,EAAE,CACb,kCADa,EAEb;AAAEzU,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,YAAD;AAAZ;AAAb,KAFa,CAlDd;AAsDHyU,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAtDZ;AAuDHC,IAAAA,mBAAmB,EAAE,CACjB,uDADiB,EAEjB;AAAE3U,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,UAAD;AAAZ;AAAb,KAFiB,CAvDlB;AA2DHmM,IAAAA,aAAa,EAAE,CAAC,kCAAD,CA3DZ;AA4DHwI,IAAAA,iBAAiB,EAAE,CAAC,qDAAD,CA5DhB;AA6DH3P,IAAAA,MAAM,EAAE,CAAC,8BAAD,CA7DL;AA8DH4P,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CA9DvB;AAiEHC,IAAAA,2BAA2B,EAAE,CACzB,0EADyB,CAjE1B;AAoEHC,IAAAA,mBAAmB,EAAE,CACjB,8DADiB,CApElB;AAuEHC,IAAAA,sBAAsB,EAAE,CACpB,2DADoB,CAvErB;AA0EHC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CA1ElB;AA2EHC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,EAE7B;AAAElV,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF6B,CA3E9B;AA+EHkV,IAAAA,eAAe,EAAE,CAAC,4CAAD,CA/Ed;AAgFHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CAhFf;AAmFHC,IAAAA,UAAU,EAAE,CAAC,8CAAD,CAnFT;AAoFHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CApFf;AAuFHC,IAAAA,eAAe,EAAE,CACb,oCADa,EAEb;AAAEvV,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,YAAD;AAAZ;AAAb,KAFa,CAvFd;AA2FHuV,IAAAA,iCAAiC,EAAE,CAC/B,yFAD+B,CA3FhC;AA8FHC,IAAAA,aAAa,EAAE,CAAC,oDAAD,CA9FZ;AA+FHC,IAAAA,kBAAkB,EAAE,CAChB,yDADgB,CA/FjB;AAkGHrJ,IAAAA,aAAa,EAAE,CAAC,8CAAD,CAlGZ;AAmGHsJ,IAAAA,6BAA6B,EAAE,CAC3B,uDAD2B,EAE3B;AAAE3V,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAF2B,CAnG5B;AAuGH2V,IAAAA,0BAA0B,EAAE,CACxB,mDADwB,EAExB;AAAE5V,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFwB,CAvGzB;AA2GH4V,IAAAA,eAAe,EAAE,CACb,yCADa,EAEb,EAFa,EAGb;AAAEna,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,wBAAV;AAAX,KAHa,CA3Gd;AAgHHoa,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CAhHrB;AAiHHC,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CAjHrB;AAkHHC,IAAAA,4BAA4B,EAAE,CAC1B,oDAD0B,EAE1B;AAAEhW,MAAAA,SAAS,
EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAF0B,CAlH3B;AAsHHgW,IAAAA,yBAAyB,EAAE,CACvB,gDADuB,EAEvB;AAAEjW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFuB,CAtHxB;AA0HH0C,IAAAA,GAAG,EAAE,CAAC,2BAAD,CA1HF;AA2HHuT,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CA3HpB;AA8HHC,IAAAA,wBAAwB,EAAE,CACtB,uEADsB,CA9HvB;AAiIHC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CAjIjB;AAkIHC,IAAAA,yBAAyB,EAAE,CACvB,wFADuB,CAlIxB;AAqIHC,IAAAA,YAAY,EAAE,CACV,kCADU,EAEV;AAAEtW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFU,CArIX;AAyIHsW,IAAAA,kCAAkC,EAAE,CAChC,0EADgC,CAzIjC;AA4IHC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CA5IR;AA6IHC,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CA7IlB;AAgJHC,IAAAA,SAAS,EAAE,CAAC,0CAAD,CAhJR;AAiJHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CAjJpB;AAkJHC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CAlJ7B;AAqJHC,IAAAA,uBAAuB,EAAE,CAAC,gDAAD,CArJtB;AAsJHrQ,IAAAA,SAAS,EAAE,CAAC,yCAAD,CAtJR;AAuJHsQ,IAAAA,sBAAsB,EAAE,CAAC,iDAAD,CAvJrB;AAwJHC,IAAAA,gBAAgB,EAAE,CAAC,iDAAD,CAxJf;AAyJHC,IAAAA,4BAA4B,EAAE,CAC1B,4EAD0B,EAE1B;AAAEhX,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF0B,CAzJ3B;AA6JHgX,IAAAA,0BAA0B,EAAE,CAAC,6CAAD,CA7JzB;AA8JHC,IAAAA,UAAU,EAAE,CAAC,2CAAD,CA9JT;AA+JHC,IAAAA,oBAAoB,EAAE,CAAC,8CAAD,CA/JnB;AAgKHC,IAAAA,YAAY,EAAE,CAAC,yCAAD,CAhKX;AAiKHC,IAAAA,aAAa,EAAE,CAAC,uDAAD,CAjKZ;AAkKHC,IAAAA,mBAAmB,EAAE,CACjB,4EADiB,CAlKlB;AAqKHC,IAAAA,cAAc,EAAE,CACZ,2DADY,CArKb;AAwKHC,IAAAA,mBAAmB,EAAE,CAAC,+CAAD,CAxKlB;AAyKHC,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CAzKf;AA0KHC,IAAAA,QAAQ,EAAE,CAAC,iCAAD,CA1KP;AA2KHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CA3KZ;AA4KHC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CA5KlB;AA6KHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CA7KpB;AA8KHC,IAAAA,8BAA8B,EAAE,CAC5B,sFAD4B,CA9K7B;AAiLHC,IAAAA,iBAAiB,EAAE,CAAC,4CAAD,CAjLhB;AAkLHC,IAAAA,SAAS,EAAE,CAAC,kCAAD,CAlLR;AAmLHC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAnLnB;AAoLHC,IAAAA,UAAU,EAAE,CAAC,iDAAD,CApLT;AAqLHC,IAAAA,eAAe,EAAE,CAAC,sDAAD,CArLd;AAsLHC,IAAAA,eAAe,EAAE,CAAC,+CAAD,CAtLd;AAuLHC,IAAAA,yBAAyB,EAAE,CACvB,+EADuB,CAvLxB;AA0LHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CA1LlC;AA6LHC,IAAAA,WAAW,EAAE,CAAC,iDAAD,CA7LV;AA8LHC,IAAAA,eAAe,EAAE,CAAC,qDAAD,CA9Ld;AA+LHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CA/LlC;AAkMHC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CAlMP;AAmMHlM,IAAAA,UAAU,EAAE,CAAC,2CAAD,CAnMT;AAoMHmM,IAAAA,uBAAuB,EAAE,CACrB,kDADqB,CApMtB;AAuMHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CAvMX;AAwMHC,IAAAA,yBAAyB,EAAE,CACvB,oEADuB,EAEvB;AAAE7Y,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFuB,CAxMxB;AA4MH+P,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA5MhB;AA6MH8I,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CA7MpB;AAgNHC,IAAAA,yBAAyB,EAAE,CAAC,oCAAD,CAhNxB;AAiNHC,IAAAA,wBAAwB,EAAE,CACtB,kDADsB,CAjNvB;AAoNHxT,IAAAA,WAAW,EAAE,CAAC,mCAAD,CApNV;AAqNHyT,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CArNf;AAsNHC,IAAAA,cAAc,EAAE,CAAC,gCAAD,CAtNb;AAuNHC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CAvNrB;AA0NHC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CA1Nd;AA2NHtQ,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA3NvB;AA4NHC,IAAAA,UAAU,EAAE,CAAC,uBAAD,CA5NT;AA6NHtD,IAAAA,WAAW,EAAE,CAAC,6BAAD,CA7NV;AA8NHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CA9NR;AA+NH2T,IAAAA,eAAe,EAAE,CAAC,uCAAD,CA/Nd;AAgOHC,IAAAA,mCAAmC,EAAE,CAAC,kCAAD,CAhOlC;AAiOHC,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAjOZ;AAkOHC,IAAAA,eAAe,EAAE,CAAC,wCAAD,CAlOd;AAmOH7T,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAnOT;AAoOH8T,IAAAA,oCAAoC,EAAE,CAClC,sDADkC,EAElC;AAAEzZ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFkC,CApOnC;AAwOHyZ,IAAAA,iBAAiB,EAAE,CACf,wDADe,CAxOhB;AA2OHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CA3OX;AA4OHC,IAAAA,QAAQ,EAAE,CAAC,gCAAD,CA5OP;AA6OHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CA7OR;AA8OH1M,IAAAA,YAAY,EAAE,CAAC,iCAAD,CA9OX;AA+OHoE,IAAAA,KAAK,EAAE,CAAC,mCAAD,CA/OJ;AAgPHnE,IAAAA,WAAW,EAA
E,CAAC,kDAAD,CAhPV;AAiPH0M,IAAAA,2BAA2B,EAAE,CACzB,6EADyB,EAEzB,EAFyB,EAGzB;AAAEvG,MAAAA,SAAS,EAAE;AAAb,KAHyB,CAjP1B;AAsPHnD,IAAAA,kBAAkB,EAAE,CAChB,uDADgB,CAtPjB;AAyPH2J,IAAAA,yBAAyB,EAAE,CACvB,2FADuB,EAEvB,EAFuB,EAGvB;AAAExG,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAzPxB;AA8PHyG,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,CA9P1B;AAiQHC,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAE1G,MAAAA,SAAS,EAAE;AAAb,KAH0B,CAjQ3B;AAsQH2G,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAE3G,MAAAA,SAAS,EAAE;AAAb,KAH0B,CAtQ3B;AA2QH4G,IAAAA,YAAY,EAAE,CAAC,qDAAD,CA3QX;AA4QHC,IAAAA,gBAAgB,EAAE,CACd,kCADc,EAEd;AAAEpa,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFc,CA5Qf;AAgRHoa,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CAhRhB;AAiRHC,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CAjRvB;AAoRHC,IAAAA,wBAAwB,EAAE,CACtB,0EADsB,EAEtB,EAFsB,EAGtB;AAAEhH,MAAAA,SAAS,EAAE;AAAb,KAHsB,CApRvB;AAyRHiH,IAAAA,sBAAsB,EAAE,CACpB,wFADoB,EAEpB,EAFoB,EAGpB;AAAEjH,MAAAA,SAAS,EAAE;AAAb,KAHoB,CAzRrB;AA8RHkH,IAAAA,yBAAyB,EAAE,CACvB,2EADuB,EAEvB,EAFuB,EAGvB;AAAElH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CA9RxB;AAmSHmH,IAAAA,yBAAyB,EAAE,CACvB,2EADuB,EAEvB,EAFuB,EAGvB;AAAEnH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAnSxB;AAwSHoH,IAAAA,eAAe,EAAE,CAAC,kDAAD,CAxSd;AAySHC,IAAAA,QAAQ,EAAE,CAAC,qCAAD,CAzSP;AA0SHzX,IAAAA,MAAM,EAAE,CAAC,6BAAD,CA1SL;AA2SH0X,IAAAA,sBAAsB,EAAE,CACpB,wDADoB,CA3SrB;AA8SHC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CA9SlB;AA+SHC,IAAAA,+BAA+B,EAAE,CAAC,iCAAD,CA/S9B;AAgTHC,IAAAA,gBAAgB,EAAE,CACd,yDADc,CAhTf;AAmTHC,IAAAA,iCAAiC,EAAE,CAC/B,wFAD+B,CAnThC;AAsTHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CAtTZ;AAuTHC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CAvTjB;AA0THC,IAAAA,0BAA0B,EAAE,CACxB,iFADwB,EAExB,EAFwB,EAGxB;AAAE1f,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,6BAAV;AAAX,KAHwB,CA1TzB;AA+TH2f,IAAAA,2BAA2B,EAAE,CACzB,iFADyB,CA/T1B;AAkUHxN,IAAAA,aAAa,EAAE,CAAC,6CAAD,CAlUZ;AAmUHyN,IAAAA,0BAA0B,EAAE,CACxB,oDADwB,CAnUzB;AAsUHC,IAAAA,kBAAkB,EAAE,CAChB,sEADgB,EAEhB;AAAEC,MAAAA,OAAO,EAAE;AAAX,KAFgB;AAtUjB,GAp7BO;AA+vCdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,IAAI,EAAE,CAAC,kBAAD,CADF;AAEJC,IAAAA,OAAO,EAAE,CAAC,qBAAD,EAAwB;AAAE3b,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAAxB,CAFL;AAGJ2b,IAAAA,qBAAqB,EAAE,CAAC,oBAAD,CAHnB;AAIJC,IAAAA,MAAM,EAAE,CAAC,oBAAD,CAJJ;AAKJzI,IAAAA,KAAK,EAAE,CAAC,0BAAD,CALH;AAMJ0I,IAAAA,MAAM,EAAE,CAAC,oBAAD,EAAuB;AAAE9b,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAAvB,CANJ;AAOJ8b,IAAAA,KAAK,EAAE,CAAC,mBAAD;AAPH,GA/vCM;AAwwCdC,EAAAA,cAAc,EAAE;AACZ1Y,IAAAA,QAAQ,EAAE,CACN,iEADM,CADE;AAIZK,IAAAA,iBAAiB,EAAE,CAAC,kDAAD,CAJP;AAKZG,IAAAA,WAAW,EAAE,CACT,mEADS;AALD,GAxwCF;AAixCdmY,EAAAA,KAAK,EAAE;AACHC,IAAAA,iCAAiC,EAAE,CAC/B,0DAD+B,CADhC;AAIHC,IAAAA,kCAAkC,EAAE,CAChC,yDADgC,EAEhC;AAAEnc,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFgC,CAJjC;AAQHmc,IAAAA,+BAA+B,EAAE,CAC7B,wDAD6B,CAR9B;AAWHC,IAAAA,+BAA+B,EAAE,CAC7B,yDAD6B,EAE7B;AAAErc,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAF6B,CAX9B;AAeHqc,IAAAA,4BAA4B,EAAE,CAC1B,wDAD0B,CAf3B;AAkBH7Z,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAlBL;AAmBH8Z,IAAAA,4BAA4B,EAAE,CAC1B,6EAD0B,CAnB3B;AAsBHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CAtBpB;AAuBHC,IAAAA,4BAA4B,EAAE,CAC1B,gGAD0B,CAvB3B;AA0BHC,IAAAA,qBAAqB,EAAE,CACnB,sEADmB,CA1BpB;AA6BHC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CA7BV;AA8BHC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CA9BR;AA+BHC,IAAAA,yBAAyB,EAAE,CACvB,6FADuB,CA/BxB;AAkCHC,IAAAA,kBAAkB,EAAE,CAChB,mEADgB,CAlCjB;AAqCHC,IAAAA,yBAAyB,EAAE,CACvB,0DADuB,CArCxB;AAwCHzX,IAAAA,IAAI,EAAE,CAAC,uBAAD,CAxCH;AAyCH0X,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAzCb;AA0CHC,IAAAA,2BAA2B,EAAE,CACzB,4EADyB,CA1C1B;AA6CHC,IAAAA,oBAAoB,EAAE,CAAC,+CAAD,CA7CnB;AA8CHpU,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA9
CvB;AA+CHqU,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CA/Cf;AAgDHC,IAAAA,2BAA2B,EAAE,CACzB,+CADyB,CAhD1B;AAmDHC,IAAAA,iBAAiB,EAAE,CACf,4CADe,EAEf;AAAErd,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFe,CAnDhB;AAuDHqd,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAvDb;AAwDHC,IAAAA,4BAA4B,EAAE,CAC1B,6DAD0B,CAxD3B;AA2DHC,IAAAA,kBAAkB,EAAE,CAChB,4DADgB,CA3DjB;AA8DHC,IAAAA,eAAe,EAAE,CACb,2DADa,CA9Dd;AAiEHC,IAAAA,4BAA4B,EAAE,CAC1B,+FAD0B,CAjE3B;AAoEHC,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CApEpB;AAuEHC,IAAAA,WAAW,EAAE,CAAC,qCAAD;AAvEV,GAjxCO;AA01Cd7B,EAAAA,KAAK,EAAE;AACH8B,IAAAA,wBAAwB,EAAE,CAAC,mBAAD,CADvB;AAEHC,IAAAA,KAAK,EAAE,CAAC,6BAAD,CAFJ;AAGHC,IAAAA,YAAY,EAAE,CAAC,6BAAD,CAHX;AAIHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CAJpB;AAKHC,IAAAA,oCAAoC,EAAE,CAAC,gCAAD,CALnC;AAMHC,IAAAA,4BAA4B,EAAE,CAAC,qBAAD,CAN3B;AAOHC,IAAAA,kCAAkC,EAAE,CAAC,iBAAD,CAPjC;AAQHC,IAAAA,2BAA2B,EAAE,CAAC,qBAAD,CAR1B;AASHC,IAAAA,4BAA4B,EAAE,CAAC,oCAAD,CAT3B;AAUHC,IAAAA,kCAAkC,EAAE,CAAC,4BAAD,CAVjC;AAWHC,IAAAA,MAAM,EAAE,CAAC,gCAAD,CAXL;AAYHhe,IAAAA,gBAAgB,EAAE,CAAC,WAAD,CAZf;AAaHie,IAAAA,aAAa,EAAE,CAAC,uBAAD,CAbZ;AAcHC,IAAAA,iBAAiB,EAAE,CAAC,iCAAD,CAdhB;AAeHC,IAAAA,yBAAyB,EAAE,CAAC,iCAAD,CAfxB;AAgBHC,IAAAA,+BAA+B,EAAE,CAAC,yBAAD,CAhB9B;AAiBHrZ,IAAAA,IAAI,EAAE,CAAC,YAAD,CAjBH;AAkBHsZ,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CAlBzB;AAmBHC,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CAnBzB;AAoBHC,IAAAA,2BAA2B,EAAE,CAAC,qBAAD,CApB1B;AAqBHC,IAAAA,iCAAiC,EAAE,CAAC,qBAAD,CArBhC;AAsBHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CAtBnB;AAuBHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CAvBnB;AAwBHC,IAAAA,2BAA2B,EAAE,CAAC,oBAAD,CAxB1B;AAyBHC,IAAAA,kBAAkB,EAAE,CAAC,gCAAD,CAzBjB;AA0BHC,IAAAA,gCAAgC,EAAE,CAAC,yBAAD,CA1B/B;AA2BHC,IAAAA,qBAAqB,EAAE,CAAC,4BAAD,CA3BpB;AA4BHC,IAAAA,iCAAiC,EAAE,CAAC,gBAAD,CA5BhC;AA6BHC,IAAAA,yCAAyC,EAAE,CAAC,8BAAD,CA7BxC;AA8BHC,IAAAA,OAAO,EAAE,CAAC,gCAAD,CA9BN;AA+BHC,IAAAA,QAAQ,EAAE,CAAC,mCAAD,CA/BP;AAgCHC,IAAAA,mBAAmB,EAAE,CAAC,aAAD;AAhClB;AA11CO,CAAlB;;ACAO,MAAMC,OAAO,GAAG,mBAAhB;;ACAA,SAASC,kBAAT,CAA4BC,OAA5B,EAAqCC,YAArC,EAAmD;AACtD,QAAMC,UAAU,GAAG,EAAnB;;AACA,OAAK,MAAM,CAACC,KAAD,EAAQC,SAAR,CAAX,IAAiCC,MAAM,CAACC,OAAP,CAAeL,YAAf,CAAjC,EAA+D;AAC3D,SAAK,MAAM,CAACM,UAAD,EAAaC,QAAb,CAAX,IAAqCH,MAAM,CAACC,OAAP,CAAeF,SAAf,CAArC,EAAgE;AAC5D,YAAM,CAACK,KAAD,EAAQC,QAAR,EAAkBC,WAAlB,IAAiCH,QAAvC;AACA,YAAM,CAACI,MAAD,EAASC,GAAT,IAAgBJ,KAAK,CAACK,KAAN,CAAY,GAAZ,CAAtB;AACA,YAAMC,gBAAgB,GAAGV,MAAM,CAACW,MAAP,CAAc;AAAEJ,QAAAA,MAAF;AAAUC,QAAAA;AAAV,OAAd,EAA+BH,QAA/B,CAAzB;;AACA,UAAI,CAACR,UAAU,CAACC,KAAD,CAAf,EAAwB;AACpBD,QAAAA,UAAU,CAACC,KAAD,CAAV,GAAoB,EAApB;AACH;;AACD,YAAMc,YAAY,GAAGf,UAAU,CAACC,KAAD,CAA/B;;AACA,UAAIQ,WAAJ,EAAiB;AACbM,QAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BW,QAAQ,CAAClB,OAAD,EAAUG,KAAV,EAAiBI,UAAjB,EAA6BQ,gBAA7B,EAA+CJ,WAA/C,CAAnC;AACA;AACH;;AACDM,MAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BP,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBK,gBAAzB,CAA3B;AACH;AACJ;;AACD,SAAOb,UAAP;AACH;;AACD,SAASgB,QAAT,CAAkBlB,OAAlB,EAA2BG,KAA3B,EAAkCI,UAAlC,EAA8CG,QAA9C,EAAwDC,WAAxD,EAAqE;AACjE,QAAMS,mBAAmB,GAAGpB,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBA,QAAzB,CAA5B;AACA;;AACA,WAASW,eAAT,CAAyB,GAAGC,IAA5B,EAAkC;AAC9B;AACA,QAAIC,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6B9O,KAA7B,CAAmC,GAAG4P,IAAtC,CAAd,CAF8B;;AAI9B,QAAIX,WAAW,CAACjN,SAAhB,EAA2B;AACvB6N,MAAAA,OAAO,GAAGlB,MAAM,CAACW,MAAP,CAAc,EAAd,EAAkBO,OAAlB,EAA2B;AACjCC,QAAAA,IAAI,EAAED,OAAO,CAACZ,WAAW,CAACjN,SAAb,CADoB;AAEjC,SAACiN,WAAW,CAACjN,SAAb,GAAyB+N;AAFQ,OAA3B,CAAV;AAIA,aAAOL,mBAAmB,CAACG,OAAD,CAA1B;AACH;;AACD,QAAIZ,WAAW,CAAC9kB,OAAhB,EAAyB;AACrB,YAAM,CAAC6lB,QAAD,EAAWC,aAAX,IAA4BhB,WAAW,CAAC9kB,OAA9C;AACAmkB,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,WAAU1B,KAAM,IAAGI,UAAW,kCAAiCmB,QAAS
,IAAGC,aAAc,IAA3G;AACH;;AACD,QAAIhB,WAAW,CAAC3N,UAAhB,EAA4B;AACxBgN,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAiBlB,WAAW,CAAC3N,UAA7B;AACH;;AACD,QAAI2N,WAAW,CAACjd,iBAAhB,EAAmC;AAC/B;AACA,YAAM6d,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6B9O,KAA7B,CAAmC,GAAG4P,IAAtC,CAAhB;;AACA,WAAK,MAAM,CAACQ,IAAD,EAAOC,KAAP,CAAX,IAA4B1B,MAAM,CAACC,OAAP,CAAeK,WAAW,CAACjd,iBAA3B,CAA5B,EAA2E;AACvE,YAAIoe,IAAI,IAAIP,OAAZ,EAAqB;AACjBvB,UAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,IAAGC,IAAK,0CAAyC3B,KAAM,IAAGI,UAAW,aAAYwB,KAAM,WAAzG;;AACA,cAAI,EAAEA,KAAK,IAAIR,OAAX,CAAJ,EAAyB;AACrBA,YAAAA,OAAO,CAACQ,KAAD,CAAP,GAAiBR,OAAO,CAACO,IAAD,CAAxB;AACH;;AACD,iBAAOP,OAAO,CAACO,IAAD,CAAd;AACH;AACJ;;AACD,aAAOV,mBAAmB,CAACG,OAAD,CAA1B;AACH,KA/B6B;;;AAiC9B,WAAOH,mBAAmB,CAAC,GAAGE,IAAJ,CAA1B;AACH;;AACD,SAAOjB,MAAM,CAACW,MAAP,CAAcK,eAAd,EAA+BD,mBAA/B,CAAP;AACH;;ACxDM,SAASY,mBAAT,CAA6BhC,OAA7B,EAAsC;AACzC,QAAMiC,GAAG,GAAGlC,kBAAkB,CAACC,OAAD,EAAUkC,SAAV,CAA9B;AACA,SAAO;AACHC,IAAAA,IAAI,EAAEF;AADH,GAAP;AAGH;AACDD,mBAAmB,CAAClC,OAApB,GAA8BA,OAA9B;AACA,AAAO,SAASsC,yBAAT,CAAmCpC,OAAnC,EAA4C;AAC/C,QAAMiC,GAAG,GAAGlC,kBAAkB,CAACC,OAAD,EAAUkC,SAAV,CAA9B;AACA,2CACOD,GADP;AAEIE,IAAAA,IAAI,EAAEF;AAFV;AAIH;AACDG,yBAAyB,CAACtC,OAA1B,GAAoCA,OAApC;;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT 
/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET 
/orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE 
/user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\n \"POST /content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: [\"corsair\"] } },\n ],\n createContentAttachmentForRepo: [\n \"POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: [\"corsair\"] } },\n ],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET 
/repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] },\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\n \"GET /codes_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getConductCode: [\n \"GET /codes_of_conduct/{key}\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getForRepo: [\n \"GET /repos/{owner}/{repo}/community/code_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET 
/gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE 
/repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n { mediaType: { previews: [\"mockingbird\"] } },\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\n \"GET 
/user/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getStatusForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForAuthenticatedUser: [\n \"GET /user/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT 
/orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\n \"PUT /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createCard: [\n \"POST /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createColumn: [\n \"POST /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForAuthenticatedUser: [\n \"POST /user/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForOrg: [\n \"POST /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForRepo: [\n \"POST /repos/{owner}/{repo}/projects\",\n 
{ mediaType: { previews: [\"inertia\"] } },\n ],\n delete: [\n \"DELETE /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteCard: [\n \"DELETE /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteColumn: [\n \"DELETE /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n get: [\n \"GET /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getCard: [\n \"GET /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getColumn: [\n \"GET /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCards: [\n \"GET /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCollaborators: [\n \"GET /projects/{project_id}/collaborators\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listColumns: [\n \"GET /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForRepo: [\n \"GET /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForUser: [\n \"GET /users/{username}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveCard: [\n \"POST /projects/columns/cards/{card_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveColumn: [\n \"POST /projects/columns/{column_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n update: [\n \"PATCH /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateCard: [\n \"PATCH /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateColumn: [\n \"PATCH /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET 
/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n { mediaType: { previews: [\"lydian\"] } },\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteLegacy: [\n \"DELETE /reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n {\n deprecated: \"octokit.rest.reactions.deleteLegacy() is 
deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy\",\n },\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\",\n ],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\n \"POST /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n { mediaType: { previews: [\"baptiste\"] } },\n ],\n createWebhook: [\"POST 
/repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\n \"DELETE /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\n \"GET /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET 
/repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\n \"PUT /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH 
/repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", { mediaType: { previews: [\"cloak\"] } }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", { mediaType: { previews: [\"mercy\"] } }],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: 
[\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"5.3.1\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n 
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nexport function legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nlegacyRestEndpointMethods.VERSION = 
VERSION;\n"],"names":["Endpoints","actions","addSelectedRepoToOrgSecret","approveWorkflowRun","cancelWorkflowRun","createOrUpdateEnvironmentSecret","createOrUpdateOrgSecret","createOrUpdateRepoSecret","createRegistrationTokenForOrg","createRegistrationTokenForRepo","createRemoveTokenForOrg","createRemoveTokenForRepo","createWorkflowDispatch","deleteArtifact","deleteEnvironmentSecret","deleteOrgSecret","deleteRepoSecret","deleteSelfHostedRunnerFromOrg","deleteSelfHostedRunnerFromRepo","deleteWorkflowRun","deleteWorkflowRunLogs","disableSelectedRepositoryGithubActionsOrganization","disableWorkflow","downloadArtifact","downloadJobLogsForWorkflowRun","downloadWorkflowRunLogs","enableSelectedRepositoryGithubActionsOrganization","enableWorkflow","getAllowedActionsOrganization","getAllowedActionsRepository","getArtifact","getEnvironmentPublicKey","getEnvironmentSecret","getGithubActionsPermissionsOrganization","getGithubActionsPermissionsRepository","getJobForWorkflowRun","getOrgPublicKey","getOrgSecret","getPendingDeploymentsForRun","getRepoPermissions","renamed","getRepoPublicKey","getRepoSecret","getReviewsForRun","getSelfHostedRunnerForOrg","getSelfHostedRunnerForRepo","getWorkflow","getWorkflowRun","getWorkflowRunUsage","getWorkflowUsage","listArtifactsForRepo","listEnvironmentSecrets","listJobsForWorkflowRun","listOrgSecrets","listRepoSecrets","listRepoWorkflows","listRunnerApplicationsForOrg","listRunnerApplicationsForRepo","listSelectedReposForOrgSecret","listSelectedRepositoriesEnabledGithubActionsOrganization","listSelfHostedRunnersForOrg","listSelfHostedRunnersForRepo","listWorkflowRunArtifacts","listWorkflowRuns","listWorkflowRunsForRepo","reRunWorkflow","removeSelectedRepoFromOrgSecret","reviewPendingDeploymentsForRun","setAllowedActionsOrganization","setAllowedActionsRepository","setGithubActionsPermissionsOrganization","setGithubActionsPermissionsRepository","setSelectedReposForOrgSecret","setSelectedRepositoriesEnabledGithubActionsOrganization","activity","checkRepoIsStarredByAuthenticatedUser","deleteRepoSubscription","deleteThreadSubscription","getFeeds","getRepoSubscription","getThread","getThreadSubscriptionForAuthenticatedUser","listEventsForAuthenticatedUser","listNotificationsForAuthenticatedUser","listOrgEventsForAuthenticatedUser","listPublicEvents","listPublicEventsForRepoNetwork","listPublicEventsForUser","listPublicOrgEvents","listReceivedEventsForUser","listReceivedPublicEventsForUser","listRepoEvents","listRepoNotificationsForAuthenticatedUser","listReposStarredByAuthenticatedUser","listReposStarredByUser","listReposWatchedByUser","listStargazersForRepo","listWatchedReposForAuthenticatedUser","listWatchersForRepo","markNotificationsAsRead","markRepoNotificationsAsRead","markThreadAsRead","setRepoSubscription","setThreadSubscription","starRepoForAuthenticatedUser","unstarRepoForAuthenticatedUser","apps","addRepoToInstallation","checkToken","createContentAttachment","mediaType","previews","createContentAttachmentForRepo","createFromManifest","createInstallationAccessToken","deleteAuthorization","deleteInstallation","deleteToken","getAuthenticated","getBySlug","getInstallation","getOrgInstallation","getRepoInstallation","getSubscriptionPlanForAccount","getSubscriptionPlanForAccountStubbed","getUserInstallation","getWebhookConfigForApp","listAccountsForPlan","listAccountsForPlanStubbed","listInstallationReposForAuthenticatedUser","listInstallations","listInstallationsForAuthenticatedUser","listPlans","listPlansStubbed","listReposAccessibleToInstallation","listSubscription
sForAuthenticatedUser","listSubscriptionsForAuthenticatedUserStubbed","removeRepoFromInstallation","resetToken","revokeInstallationAccessToken","scopeToken","suspendInstallation","unsuspendInstallation","updateWebhookConfigForApp","billing","getGithubActionsBillingOrg","getGithubActionsBillingUser","getGithubPackagesBillingOrg","getGithubPackagesBillingUser","getSharedStorageBillingOrg","getSharedStorageBillingUser","checks","create","createSuite","get","getSuite","listAnnotations","listForRef","listForSuite","listSuitesForRef","rerequestSuite","setSuitesPreferences","update","codeScanning","deleteAnalysis","getAlert","renamedParameters","alert_id","getAnalysis","getSarif","listAlertInstances","listAlertsForRepo","listAlertsInstances","listRecentAnalyses","updateAlert","uploadSarif","codesOfConduct","getAllCodesOfConduct","getConductCode","getForRepo","emojis","enterpriseAdmin","disableSelectedOrganizationGithubActionsEnterprise","enableSelectedOrganizationGithubActionsEnterprise","getAllowedActionsEnterprise","getGithubActionsPermissionsEnterprise","listSelectedOrganizationsEnabledGithubActionsEnterprise","setAllowedActionsEnterprise","setGithubActionsPermissionsEnterprise","setSelectedOrganizationsEnabledGithubActionsEnterprise","gists","checkIsStarred","createComment","delete","deleteComment","fork","getComment","getRevision","list","listComments","listCommits","listForUser","listForks","listPublic","listStarred","star","unstar","updateComment","git","createBlob","createCommit","createRef","createTag","createTree","deleteRef","getBlob","getCommit","getRef","getTag","getTree","listMatchingRefs","updateRef","gitignore","getAllTemplates","getTemplate","interactions","getRestrictionsForAuthenticatedUser","getRestrictionsForOrg","getRestrictionsForRepo","getRestrictionsForYourPublicRepos","removeRestrictionsForAuthenticatedUser","removeRestrictionsForOrg","removeRestrictionsForRepo","removeRestrictionsForYourPublicRepos","setRestrictionsForAuthenticatedUser","setRestrictionsForOrg","setRestrictionsForRepo","setRestrictionsForYourPublicRepos","issues","addAssignees","addLabels","checkUserCanBeAssigned","createLabel","createMilestone","deleteLabel","deleteMilestone","getEvent","getLabel","getMilestone","listAssignees","listCommentsForRepo","listEvents","listEventsForRepo","listEventsForTimeline","listForAuthenticatedUser","listForOrg","listForRepo","listLabelsForMilestone","listLabelsForRepo","listLabelsOnIssue","listMilestones","lock","removeAllLabels","removeAssignees","removeLabel","setLabels","unlock","updateLabel","updateMilestone","licenses","getAllCommonlyUsed","markdown","render","renderRaw","headers","meta","getOctocat","getZen","root","migrations","cancelImport","deleteArchiveForAuthenticatedUser","deleteArchiveForOrg","downloadArchiveForOrg","getArchiveForAuthenticatedUser","getCommitAuthors","getImportStatus","getLargeFiles","getStatusForAuthenticatedUser","getStatusForOrg","listReposForOrg","listReposForUser","mapCommitAuthor","setLfsPreference","startForAuthenticatedUser","startForOrg","startImport","unlockRepoForAuthenticatedUser","unlockRepoForOrg","updateImport","orgs","blockUser","cancelInvitation","checkBlockedUser","checkMembershipForUser","checkPublicMembershipForUser","convertMemberToOutsideCollaborator","createInvitation","createWebhook","deleteWebhook","getMembershipForAuthenticatedUser","getMembershipForUser","getWebhook","getWebhookConfigForOrg","listAppInstallations","listBlockedUsers","listFailedInvitations","listInvitationTeams","listMembers","listMembershipsForAuth
enticatedUser","listOutsideCollaborators","listPendingInvitations","listPublicMembers","listWebhooks","pingWebhook","removeMember","removeMembershipForUser","removeOutsideCollaborator","removePublicMembershipForAuthenticatedUser","setMembershipForUser","setPublicMembershipForAuthenticatedUser","unblockUser","updateMembershipForAuthenticatedUser","updateWebhook","updateWebhookConfigForOrg","packages","deletePackageForAuthenticatedUser","deletePackageForOrg","deletePackageVersionForAuthenticatedUser","deletePackageVersionForOrg","getAllPackageVersionsForAPackageOwnedByAnOrg","getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser","getAllPackageVersionsForPackageOwnedByAuthenticatedUser","getAllPackageVersionsForPackageOwnedByOrg","getAllPackageVersionsForPackageOwnedByUser","getPackageForAuthenticatedUser","getPackageForOrganization","getPackageForUser","getPackageVersionForAuthenticatedUser","getPackageVersionForOrganization","getPackageVersionForUser","restorePackageForAuthenticatedUser","restorePackageForOrg","restorePackageVersionForAuthenticatedUser","restorePackageVersionForOrg","projects","addCollaborator","createCard","createColumn","createForAuthenticatedUser","createForOrg","createForRepo","deleteCard","deleteColumn","getCard","getColumn","getPermissionForUser","listCards","listCollaborators","listColumns","moveCard","moveColumn","removeCollaborator","updateCard","updateColumn","pulls","checkIfMerged","createReplyForReviewComment","createReview","createReviewComment","deletePendingReview","deleteReviewComment","dismissReview","getReview","getReviewComment","listCommentsForReview","listFiles","listRequestedReviewers","listReviewComments","listReviewCommentsForRepo","listReviews","merge","removeRequestedReviewers","requestReviewers","submitReview","updateBranch","updateReview","updateReviewComment","rateLimit","reactions","createForCommitComment","createForIssue","createForIssueComment","createForPullRequestReviewComment","createForRelease","createForTeamDiscussionCommentInOrg","createForTeamDiscussionInOrg","deleteForCommitComment","deleteForIssue","deleteForIssueComment","deleteForPullRequestComment","deleteForTeamDiscussion","deleteForTeamDiscussionComment","deleteLegacy","deprecated","listForCommitComment","listForIssue","listForIssueComment","listForPullRequestReviewComment","listForTeamDiscussionCommentInOrg","listForTeamDiscussionInOrg","repos","acceptInvitation","addAppAccessRestrictions","mapToData","addStatusCheckContexts","addTeamAccessRestrictions","addUserAccessRestrictions","checkCollaborator","checkVulnerabilityAlerts","compareCommits","compareCommitsWithBasehead","createCommitComment","createCommitSignatureProtection","createCommitStatus","createDeployKey","createDeployment","createDeploymentStatus","createDispatchEvent","createFork","createInOrg","createOrUpdateEnvironment","createOrUpdateFileContents","createPagesSite","createRelease","createUsingTemplate","declineInvitation","deleteAccessRestrictions","deleteAdminBranchProtection","deleteAnEnvironment","deleteBranchProtection","deleteCommitComment","deleteCommitSignatureProtection","deleteDeployKey","deleteDeployment","deleteFile","deleteInvitation","deletePagesSite","deletePullRequestReviewProtection","deleteRelease","deleteReleaseAsset","disableAutomatedSecurityFixes","disableVulnerabilityAlerts","downloadArchive","downloadTarballArchive","downloadZipballArchive","enableAutomatedSecurityFixes","enableVulnerabilityAlerts","getAccessRestrictions","getAdminBranchProtection","getAllEnvironments","getAllStatusChe
ckContexts","getAllTopics","getAppsWithAccessToProtectedBranch","getBranch","getBranchProtection","getClones","getCodeFrequencyStats","getCollaboratorPermissionLevel","getCombinedStatusForRef","getCommitActivityStats","getCommitComment","getCommitSignatureProtection","getCommunityProfileMetrics","getContent","getContributorsStats","getDeployKey","getDeployment","getDeploymentStatus","getEnvironment","getLatestPagesBuild","getLatestRelease","getPages","getPagesBuild","getPagesHealthCheck","getParticipationStats","getPullRequestReviewProtection","getPunchCardStats","getReadme","getReadmeInDirectory","getRelease","getReleaseAsset","getReleaseByTag","getStatusChecksProtection","getTeamsWithAccessToProtectedBranch","getTopPaths","getTopReferrers","getUsersWithAccessToProtectedBranch","getViews","getWebhookConfigForRepo","listBranches","listBranchesForHeadCommit","listCommentsForCommit","listCommitCommentsForRepo","listCommitStatusesForRef","listContributors","listDeployKeys","listDeploymentStatuses","listDeployments","listInvitations","listInvitationsForAuthenticatedUser","listLanguages","listPagesBuilds","listPullRequestsAssociatedWithCommit","listReleaseAssets","listReleases","listTags","listTeams","removeAppAccessRestrictions","removeStatusCheckContexts","removeStatusCheckProtection","removeTeamAccessRestrictions","removeUserAccessRestrictions","renameBranch","replaceAllTopics","requestPagesBuild","setAdminBranchProtection","setAppAccessRestrictions","setStatusCheckContexts","setTeamAccessRestrictions","setUserAccessRestrictions","testPushWebhook","transfer","updateBranchProtection","updateCommitComment","updateInformationAboutPagesSite","updateInvitation","updatePullRequestReviewProtection","updateRelease","updateReleaseAsset","updateStatusCheckPotection","updateStatusCheckProtection","updateWebhookConfigForRepo","uploadReleaseAsset","baseUrl","search","code","commits","issuesAndPullRequests","labels","topics","users","secretScanning","teams","addOrUpdateMembershipForUserInOrg","addOrUpdateProjectPermissionsInOrg","addOrUpdateRepoPermissionsInOrg","checkPermissionsForProjectInOrg","checkPermissionsForRepoInOrg","createDiscussionCommentInOrg","createDiscussionInOrg","deleteDiscussionCommentInOrg","deleteDiscussionInOrg","deleteInOrg","getByName","getDiscussionCommentInOrg","getDiscussionInOrg","getMembershipForUserInOrg","listChildInOrg","listDiscussionCommentsInOrg","listDiscussionsInOrg","listMembersInOrg","listPendingInvitationsInOrg","listProjectsInOrg","listReposInOrg","removeMembershipForUserInOrg","removeProjectInOrg","removeRepoInOrg","updateDiscussionCommentInOrg","updateDiscussionInOrg","updateInOrg","addEmailForAuthenticated","block","checkBlocked","checkFollowingForUser","checkPersonIsFollowedByAuthenticated","createGpgKeyForAuthenticated","createPublicSshKeyForAuthenticated","deleteEmailForAuthenticated","deleteGpgKeyForAuthenticated","deletePublicSshKeyForAuthenticated","follow","getByUsername","getContextForUser","getGpgKeyForAuthenticated","getPublicSshKeyForAuthenticated","listBlockedByAuthenticated","listEmailsForAuthenticated","listFollowedByAuthenticated","listFollowersForAuthenticatedUser","listFollowersForUser","listFollowingForUser","listGpgKeysForAuthenticated","listGpgKeysForUser","listPublicEmailsForAuthenticated","listPublicKeysForUser","listPublicSshKeysForAuthenticated","setPrimaryEmailVisibilityForAuthenticated","unblock","unfollow","updateAuthenticated","VERSION","endpointsToMethods","octokit","endpointsMap","newMethods","scope","endpoints","Object","entries","m
ethodName","endpoint","route","defaults","decorations","method","url","split","endpointDefaults","assign","scopeMethods","decorate","request","requestWithDefaults","withDecorations","args","options","data","undefined","newScope","newMethodName","log","warn","name","alias","restEndpointMethods","api","ENDPOINTS","rest","legacyRestEndpointMethods"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,MAAMA,SAAS,GAAG;AACdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,0BAA0B,EAAE,CACxB,4EADwB,CADvB;AAILC,IAAAA,kBAAkB,EAAE,CAChB,0DADgB,CAJf;AAOLC,IAAAA,iBAAiB,EAAE,CACf,yDADe,CAPd;AAULC,IAAAA,+BAA+B,EAAE,CAC7B,yFAD6B,CAV5B;AAaLC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CAbpB;AAcLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CAdrB;AAiBLC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CAjB1B;AAoBLC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CApB3B;AAuBLC,IAAAA,uBAAuB,EAAE,CAAC,+CAAD,CAvBpB;AAwBLC,IAAAA,wBAAwB,EAAE,CACtB,yDADsB,CAxBrB;AA2BLC,IAAAA,sBAAsB,EAAE,CACpB,uEADoB,CA3BnB;AA8BLC,IAAAA,cAAc,EAAE,CACZ,8DADY,CA9BX;AAiCLC,IAAAA,uBAAuB,EAAE,CACrB,4FADqB,CAjCpB;AAoCLC,IAAAA,eAAe,EAAE,CAAC,kDAAD,CApCZ;AAqCLC,IAAAA,gBAAgB,EAAE,CACd,4DADc,CArCb;AAwCLC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CAxC1B;AA2CLC,IAAAA,8BAA8B,EAAE,CAC5B,0DAD4B,CA3C3B;AA8CLC,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CA9Cd;AA+CLC,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CA/ClB;AAkDLC,IAAAA,kDAAkD,EAAE,CAChD,qEADgD,CAlD/C;AAqDLC,IAAAA,eAAe,EAAE,CACb,mEADa,CArDZ;AAwDLC,IAAAA,gBAAgB,EAAE,CACd,4EADc,CAxDb;AA2DLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CA3D1B;AA8DLC,IAAAA,uBAAuB,EAAE,CACrB,sDADqB,CA9DpB;AAiELC,IAAAA,iDAAiD,EAAE,CAC/C,kEAD+C,CAjE9C;AAoELC,IAAAA,cAAc,EAAE,CACZ,kEADY,CApEX;AAuELC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CAvE1B;AA0ELC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CA1ExB;AA6ELC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CA7ER;AA8ELC,IAAAA,uBAAuB,EAAE,CACrB,sFADqB,CA9EpB;AAiFLC,IAAAA,oBAAoB,EAAE,CAClB,yFADkB,CAjFjB;AAoFLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CApFpC;AAuFLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CAvFlC;AA0FLC,IAAAA,oBAAoB,EAAE,CAAC,iDAAD,CA1FjB;AA2FLC,IAAAA,eAAe,EAAE,CAAC,4CAAD,CA3FZ;AA4FLC,IAAAA,YAAY,EAAE,CAAC,+CAAD,CA5FT;AA6FLC,IAAAA,2BAA2B,EAAE,CACzB,qEADyB,CA7FxB;AAgGLC,IAAAA,kBAAkB,EAAE,CAChB,+CADgB,EAEhB,EAFgB,EAGhB;AAAEC,MAAAA,OAAO,EAAE,CAAC,SAAD,EAAY,uCAAZ;AAAX,KAHgB,CAhGf;AAqGLC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CArGb;AAsGLC,IAAAA,aAAa,EAAE,CAAC,yDAAD,CAtGV;AAuGLC,IAAAA,gBAAgB,EAAE,CACd,2DADc,CAvGb;AA0GLC,IAAAA,yBAAyB,EAAE,CAAC,6CAAD,CA1GtB;AA2GLC,IAAAA,0BAA0B,EAAE,CACxB,uDADwB,CA3GvB;AA8GLC,IAAAA,WAAW,EAAE,CAAC,2DAAD,CA9GR;AA+GLC,IAAAA,cAAc,EAAE,CAAC,iDAAD,CA/GX;AAgHLC,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CAhHhB;AAmHLC,IAAAA,gBAAgB,EAAE,CACd,kEADc,CAnHb;AAsHLC,IAAAA,oBAAoB,EAAE,CAAC,6CAAD,CAtHjB;AAuHLC,IAAAA,sBAAsB,EAAE,CACpB,2EADoB,CAvHnB;AA0HLC,IAAAA,sBAAsB,EAAE,CACpB,sDADoB,CA1HnB;AA6HLC,IAAAA,cAAc,EAAE,CAAC,iCAAD,CA7HX;AA8HLC,IAAAA,eAAe,EAAE,CAAC,2CAAD,CA9HZ;AA+HLC,IAAAA,iBAAiB,EAAE,CAAC,6CAAD,CA/Hd;AAgILC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAhIzB;AAiILC,IAAAA,6BAA6B,EAAE,CAC3B,qDAD2B,CAjI1B;AAoILC,IAAAA,6BAA6B,EAAE,CAC3B,4DAD2B,CApI1B;AAuILC,IAAAA,wDAAwD,EAAE,CACtD,kDADsD,CAvIrD;AA0ILC,IAAAA,2BAA2B,EAAE,CAAC,iCAAD,CA1IxB;AA2ILC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CA3IzB;AA4ILC,IAAAA,wBAAwB,EAAE,CACtB,2DADsB,CA5IrB;AA+ILC,IAAAA,gBAAgB,EAAE,CACd,gEADc,CA/Ib;AAkJLC,IAAAA,uBAAuB,EAAE,CAAC,wCAAD,CAlJpB;AAmJLC,IAAAA,aAAa,EAAE,CAAC,wDAAD,CAnJV;AAoJLC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,CApJ5B;AAuJLC,IAAAA,8BAA8B,EAAE,CAC5B,sEAD4B,CAvJ3B;AA0JLC,IAAAA,6BAA6B,EAAE,CAC3B,sDAD2B,CA1J1B;AA6JLC,IAAAA,2BAA2B,EAAE,CACzB,gEADyB,CA7JxB;AAgKLC,IAAAA,uCAAuC,EAAE,CACrC,qCADqC,CAhKpC;AAmKLC,IAAAA,qCAAqC,EAAE,CACnC,+CADmC,CAnKlC;AAsKLC,IAAAA,4BAA4B,EAAE,CAC1B,4DAD0B,CAt
KzB;AAyKLC,IAAAA,uDAAuD,EAAE,CACrD,kDADqD;AAzKpD,GADK;AA8KdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,qCAAqC,EAAE,CAAC,kCAAD,CADjC;AAENC,IAAAA,sBAAsB,EAAE,CAAC,2CAAD,CAFlB;AAGNC,IAAAA,wBAAwB,EAAE,CACtB,wDADsB,CAHpB;AAMNC,IAAAA,QAAQ,EAAE,CAAC,YAAD,CANJ;AAONC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAPf;AAQNC,IAAAA,SAAS,EAAE,CAAC,wCAAD,CARL;AASNC,IAAAA,yCAAyC,EAAE,CACvC,qDADuC,CATrC;AAYNC,IAAAA,8BAA8B,EAAE,CAAC,8BAAD,CAZ1B;AAaNC,IAAAA,qCAAqC,EAAE,CAAC,oBAAD,CAbjC;AAcNC,IAAAA,iCAAiC,EAAE,CAC/B,yCAD+B,CAd7B;AAiBNC,IAAAA,gBAAgB,EAAE,CAAC,aAAD,CAjBZ;AAkBNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD,CAlB1B;AAmBNC,IAAAA,uBAAuB,EAAE,CAAC,qCAAD,CAnBnB;AAoBNC,IAAAA,mBAAmB,EAAE,CAAC,wBAAD,CApBf;AAqBNC,IAAAA,yBAAyB,EAAE,CAAC,uCAAD,CArBrB;AAsBNC,IAAAA,+BAA+B,EAAE,CAC7B,8CAD6B,CAtB3B;AAyBNC,IAAAA,cAAc,EAAE,CAAC,kCAAD,CAzBV;AA0BNC,IAAAA,yCAAyC,EAAE,CACvC,yCADuC,CA1BrC;AA6BNC,IAAAA,mCAAmC,EAAE,CAAC,mBAAD,CA7B/B;AA8BNC,IAAAA,sBAAsB,EAAE,CAAC,+BAAD,CA9BlB;AA+BNC,IAAAA,sBAAsB,EAAE,CAAC,qCAAD,CA/BlB;AAgCNC,IAAAA,qBAAqB,EAAE,CAAC,sCAAD,CAhCjB;AAiCNC,IAAAA,oCAAoC,EAAE,CAAC,yBAAD,CAjChC;AAkCNC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CAlCf;AAmCNC,IAAAA,uBAAuB,EAAE,CAAC,oBAAD,CAnCnB;AAoCNC,IAAAA,2BAA2B,EAAE,CAAC,yCAAD,CApCvB;AAqCNC,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CArCZ;AAsCNC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAtCf;AAuCNC,IAAAA,qBAAqB,EAAE,CACnB,qDADmB,CAvCjB;AA0CNC,IAAAA,4BAA4B,EAAE,CAAC,kCAAD,CA1CxB;AA2CNC,IAAAA,8BAA8B,EAAE,CAAC,qCAAD;AA3C1B,GA9KI;AA2NdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,qBAAqB,EAAE,CACnB,wEADmB,CADrB;AAIFC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CAJV;AAKFC,IAAAA,uBAAuB,EAAE,CACrB,6DADqB,EAErB;AAAEC,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFqB,CALvB;AASFC,IAAAA,8BAA8B,EAAE,CAC5B,kFAD4B,EAE5B;AAAEF,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAF4B,CAT9B;AAaFE,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CAblB;AAcFC,IAAAA,6BAA6B,EAAE,CAC3B,yDAD2B,CAd7B;AAiBFC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAjBnB;AAkBFC,IAAAA,kBAAkB,EAAE,CAAC,6CAAD,CAlBlB;AAmBFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CAnBX;AAoBFC,IAAAA,gBAAgB,EAAE,CAAC,UAAD,CApBhB;AAqBFC,IAAAA,SAAS,EAAE,CAAC,sBAAD,CArBT;AAsBFC,IAAAA,eAAe,EAAE,CAAC,0CAAD,CAtBf;AAuBFC,IAAAA,kBAAkB,EAAE,CAAC,8BAAD,CAvBlB;AAwBFC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CAxBnB;AAyBFC,IAAAA,6BAA6B,EAAE,CAC3B,gDAD2B,CAzB7B;AA4BFC,IAAAA,oCAAoC,EAAE,CAClC,wDADkC,CA5BpC;AA+BFC,IAAAA,mBAAmB,EAAE,CAAC,oCAAD,CA/BnB;AAgCFC,IAAAA,sBAAsB,EAAE,CAAC,sBAAD,CAhCtB;AAiCFC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CAjCnB;AAkCFC,IAAAA,0BAA0B,EAAE,CACxB,2DADwB,CAlC1B;AAqCFC,IAAAA,yCAAyC,EAAE,CACvC,wDADuC,CArCzC;AAwCFC,IAAAA,iBAAiB,EAAE,CAAC,wBAAD,CAxCjB;AAyCFC,IAAAA,qCAAqC,EAAE,CAAC,yBAAD,CAzCrC;AA0CFC,IAAAA,SAAS,EAAE,CAAC,gCAAD,CA1CT;AA2CFC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CA3ChB;AA4CFC,IAAAA,iCAAiC,EAAE,CAAC,gCAAD,CA5CjC;AA6CFC,IAAAA,qCAAqC,EAAE,CAAC,iCAAD,CA7CrC;AA8CFC,IAAAA,4CAA4C,EAAE,CAC1C,yCAD0C,CA9C5C;AAiDFC,IAAAA,0BAA0B,EAAE,CACxB,2EADwB,CAjD1B;AAoDFC,IAAAA,UAAU,EAAE,CAAC,uCAAD,CApDV;AAqDFC,IAAAA,6BAA6B,EAAE,CAAC,4BAAD,CArD7B;AAsDFC,IAAAA,UAAU,EAAE,CAAC,6CAAD,CAtDV;AAuDFC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CAvDnB;AAwDFC,IAAAA,qBAAqB,EAAE,CACnB,uDADmB,CAxDrB;AA2DFC,IAAAA,yBAAyB,EAAE,CAAC,wBAAD;AA3DzB,GA3NQ;AAwRdC,EAAAA,OAAO,EAAE;AACLC,IAAAA,0BAA0B,EAAE,CAAC,0CAAD,CADvB;AAELC,IAAAA,2BAA2B,EAAE,CACzB,gDADyB,CAFxB;AAKLC,IAAAA,2BAA2B,EAAE,CAAC,2CAAD,CALxB;AAMLC,IAAAA,4BAA4B,EAAE,CAC1B,iDAD0B,CANzB;AASLC,IAAAA,0BAA0B,EAAE,CACxB,iDADwB,CATvB;AAYLC,IAAAA,2BAA2B,EAAE,CACzB,uDADyB;AAZxB,GAxRK;AAwSdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,MAAM,EAAE,CAAC,uCAAD,CADJ;AAEJC,IAAAA,WAAW,EAAE,CAAC,yCAAD,CAFT;AAGJC,IAAAA,GAAG,EAAE,CAAC,qDAAD,CAHD;AAIJC,IAAAA,QAAQ,EAAE,CAAC,yDAAD,CAJN;AAKJC,IAAAA,eAAe,EAAE,CACb,iEADa
,CALb;AAQJC,IAAAA,UAAU,EAAE,CAAC,oDAAD,CARR;AASJC,IAAAA,YAAY,EAAE,CACV,oEADU,CATV;AAYJC,IAAAA,gBAAgB,EAAE,CAAC,sDAAD,CAZd;AAaJC,IAAAA,cAAc,EAAE,CACZ,oEADY,CAbZ;AAgBJC,IAAAA,oBAAoB,EAAE,CAClB,sDADkB,CAhBlB;AAmBJC,IAAAA,MAAM,EAAE,CAAC,uDAAD;AAnBJ,GAxSM;AA6TdC,EAAAA,YAAY,EAAE;AACVC,IAAAA,cAAc,EAAE,CACZ,oFADY,CADN;AAIVC,IAAAA,QAAQ,EAAE,CACN,+DADM,EAEN,EAFM,EAGN;AAAEC,MAAAA,iBAAiB,EAAE;AAAEC,QAAAA,QAAQ,EAAE;AAAZ;AAArB,KAHM,CAJA;AASVC,IAAAA,WAAW,EAAE,CACT,gEADS,CATH;AAYVC,IAAAA,QAAQ,EAAE,CAAC,2DAAD,CAZA;AAaVC,IAAAA,kBAAkB,EAAE,CAChB,yEADgB,CAbV;AAgBVC,IAAAA,iBAAiB,EAAE,CAAC,gDAAD,CAhBT;AAiBVC,IAAAA,mBAAmB,EAAE,CACjB,yEADiB,EAEjB,EAFiB,EAGjB;AAAEpI,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,oBAAjB;AAAX,KAHiB,CAjBX;AAsBVqI,IAAAA,kBAAkB,EAAE,CAAC,kDAAD,CAtBV;AAuBVC,IAAAA,WAAW,EAAE,CACT,iEADS,CAvBH;AA0BVC,IAAAA,WAAW,EAAE,CAAC,iDAAD;AA1BH,GA7TA;AAyVdC,EAAAA,cAAc,EAAE;AACZC,IAAAA,oBAAoB,EAAE,CAClB,uBADkB,EAElB;AAAEnE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFkB,CADV;AAKZmE,IAAAA,cAAc,EAAE,CACZ,6BADY,EAEZ;AAAEpE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CALJ;AASZoE,IAAAA,UAAU,EAAE,CACR,qDADQ,EAER;AAAErE,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFQ;AATA,GAzVF;AAuWdqE,EAAAA,MAAM,EAAE;AAAE1B,IAAAA,GAAG,EAAE,CAAC,aAAD;AAAP,GAvWM;AAwWd2B,EAAAA,eAAe,EAAE;AACbC,IAAAA,kDAAkD,EAAE,CAChD,6EADgD,CADvC;AAIbC,IAAAA,iDAAiD,EAAE,CAC/C,0EAD+C,CAJtC;AAObC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CAPhB;AAUbC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CAV1B;AAabC,IAAAA,uDAAuD,EAAE,CACrD,iEADqD,CAb5C;AAgBbC,IAAAA,2BAA2B,EAAE,CACzB,oEADyB,CAhBhB;AAmBbC,IAAAA,qCAAqC,EAAE,CACnC,mDADmC,CAnB1B;AAsBbC,IAAAA,sDAAsD,EAAE,CACpD,iEADoD;AAtB3C,GAxWH;AAkYdC,EAAAA,KAAK,EAAE;AACHC,IAAAA,cAAc,EAAE,CAAC,2BAAD,CADb;AAEHvC,IAAAA,MAAM,EAAE,CAAC,aAAD,CAFL;AAGHwC,IAAAA,aAAa,EAAE,CAAC,gCAAD,CAHZ;AAIHC,IAAAA,MAAM,EAAE,CAAC,yBAAD,CAJL;AAKHC,IAAAA,aAAa,EAAE,CAAC,+CAAD,CALZ;AAMHC,IAAAA,IAAI,EAAE,CAAC,6BAAD,CANH;AAOHzC,IAAAA,GAAG,EAAE,CAAC,sBAAD,CAPF;AAQH0C,IAAAA,UAAU,EAAE,CAAC,4CAAD,CART;AASHC,IAAAA,WAAW,EAAE,CAAC,4BAAD,CATV;AAUHC,IAAAA,IAAI,EAAE,CAAC,YAAD,CAVH;AAWHC,IAAAA,YAAY,EAAE,CAAC,+BAAD,CAXX;AAYHC,IAAAA,WAAW,EAAE,CAAC,8BAAD,CAZV;AAaHC,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAbV;AAcHC,IAAAA,SAAS,EAAE,CAAC,4BAAD,CAdR;AAeHC,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAfT;AAgBHC,IAAAA,WAAW,EAAE,CAAC,oBAAD,CAhBV;AAiBHC,IAAAA,IAAI,EAAE,CAAC,2BAAD,CAjBH;AAkBHC,IAAAA,MAAM,EAAE,CAAC,8BAAD,CAlBL;AAmBH5C,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAnBL;AAoBH6C,IAAAA,aAAa,EAAE,CAAC,8CAAD;AApBZ,GAlYO;AAwZdC,EAAAA,GAAG,EAAE;AACDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CADX;AAEDC,IAAAA,YAAY,EAAE,CAAC,wCAAD,CAFb;AAGDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAHV;AAIDC,IAAAA,SAAS,EAAE,CAAC,qCAAD,CAJV;AAKDC,IAAAA,UAAU,EAAE,CAAC,sCAAD,CALX;AAMDC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CANV;AAODC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAPR;AAQDC,IAAAA,SAAS,EAAE,CAAC,oDAAD,CARV;AASDC,IAAAA,MAAM,EAAE,CAAC,yCAAD,CATP;AAUDC,IAAAA,MAAM,EAAE,CAAC,8CAAD,CAVP;AAWDC,IAAAA,OAAO,EAAE,CAAC,gDAAD,CAXR;AAYDC,IAAAA,gBAAgB,EAAE,CAAC,mDAAD,CAZjB;AAaDC,IAAAA,SAAS,EAAE,CAAC,4CAAD;AAbV,GAxZS;AAuadC,EAAAA,SAAS,EAAE;AACPC,IAAAA,eAAe,EAAE,CAAC,0BAAD,CADV;AAEPC,IAAAA,WAAW,EAAE,CAAC,iCAAD;AAFN,GAvaG;AA2adC,EAAAA,YAAY,EAAE;AACVC,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAD3B;AAEVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CAFb;AAGVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CAHd;AAIVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAE7L,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B,CAJzB;AASV8L,IAAAA,sCAAsC,EAAE,CAAC,iCAAD,CAT9B;AAUVC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CAVhB;AAWVC,IAAAA,yBAAyB,EAAE,CACvB,iDADuB,CAXjB;AAcVC,IAAAA,oCAAoC,EAAE,CAClC,iCADkC,EAElC,EAFkC,EAGlC;AAAEjM,MAAAA,OAAO,E
AAE,CAAC,cAAD,EAAiB,wCAAjB;AAAX,KAHkC,CAd5B;AAmBVkM,IAAAA,mCAAmC,EAAE,CAAC,8BAAD,CAnB3B;AAoBVC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CApBb;AAqBVC,IAAAA,sBAAsB,EAAE,CAAC,8CAAD,CArBd;AAsBVC,IAAAA,iCAAiC,EAAE,CAC/B,8BAD+B,EAE/B,EAF+B,EAG/B;AAAErM,MAAAA,OAAO,EAAE,CAAC,cAAD,EAAiB,qCAAjB;AAAX,KAH+B;AAtBzB,GA3aA;AAucdsM,EAAAA,MAAM,EAAE;AACJC,IAAAA,YAAY,EAAE,CACV,4DADU,CADV;AAIJC,IAAAA,SAAS,EAAE,CAAC,yDAAD,CAJP;AAKJC,IAAAA,sBAAsB,EAAE,CAAC,gDAAD,CALpB;AAMJzF,IAAAA,MAAM,EAAE,CAAC,mCAAD,CANJ;AAOJwC,IAAAA,aAAa,EAAE,CACX,2DADW,CAPX;AAUJkD,IAAAA,WAAW,EAAE,CAAC,mCAAD,CAVT;AAWJC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CAXb;AAYJjD,IAAAA,aAAa,EAAE,CACX,2DADW,CAZX;AAeJkD,IAAAA,WAAW,EAAE,CAAC,4CAAD,CAfT;AAgBJC,IAAAA,eAAe,EAAE,CACb,4DADa,CAhBb;AAmBJ3F,IAAAA,GAAG,EAAE,CAAC,iDAAD,CAnBD;AAoBJ0C,IAAAA,UAAU,EAAE,CAAC,wDAAD,CApBR;AAqBJkD,IAAAA,QAAQ,EAAE,CAAC,oDAAD,CArBN;AAsBJC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CAtBN;AAuBJC,IAAAA,YAAY,EAAE,CAAC,yDAAD,CAvBV;AAwBJlD,IAAAA,IAAI,EAAE,CAAC,aAAD,CAxBF;AAyBJmD,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAzBX;AA0BJlD,IAAAA,YAAY,EAAE,CAAC,0DAAD,CA1BV;AA2BJmD,IAAAA,mBAAmB,EAAE,CAAC,2CAAD,CA3BjB;AA4BJC,IAAAA,UAAU,EAAE,CAAC,wDAAD,CA5BR;AA6BJC,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA7Bf;AA8BJC,IAAAA,qBAAqB,EAAE,CACnB,0DADmB,EAEnB;AAAE/I,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,aAAD;AAAZ;AAAb,KAFmB,CA9BnB;AAkCJ+I,IAAAA,wBAAwB,EAAE,CAAC,kBAAD,CAlCtB;AAmCJC,IAAAA,UAAU,EAAE,CAAC,wBAAD,CAnCR;AAoCJC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CApCT;AAqCJC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CArCpB;AAwCJC,IAAAA,iBAAiB,EAAE,CAAC,kCAAD,CAxCf;AAyCJC,IAAAA,iBAAiB,EAAE,CACf,wDADe,CAzCf;AA4CJC,IAAAA,cAAc,EAAE,CAAC,sCAAD,CA5CZ;AA6CJC,IAAAA,IAAI,EAAE,CAAC,sDAAD,CA7CF;AA8CJC,IAAAA,eAAe,EAAE,CACb,2DADa,CA9Cb;AAiDJC,IAAAA,eAAe,EAAE,CACb,8DADa,CAjDb;AAoDJC,IAAAA,WAAW,EAAE,CACT,kEADS,CApDT;AAuDJC,IAAAA,SAAS,EAAE,CAAC,wDAAD,CAvDP;AAwDJC,IAAAA,MAAM,EAAE,CAAC,yDAAD,CAxDJ;AAyDJxG,IAAAA,MAAM,EAAE,CAAC,mDAAD,CAzDJ;AA0DJ6C,IAAAA,aAAa,EAAE,CAAC,0DAAD,CA1DX;AA2DJ4D,IAAAA,WAAW,EAAE,CAAC,2CAAD,CA3DT;AA4DJC,IAAAA,eAAe,EAAE,CACb,2DADa;AA5Db,GAvcM;AAugBdC,EAAAA,QAAQ,EAAE;AACNnH,IAAAA,GAAG,EAAE,CAAC,yBAAD,CADC;AAENoH,IAAAA,kBAAkB,EAAE,CAAC,eAAD,CAFd;AAGN3F,IAAAA,UAAU,EAAE,CAAC,mCAAD;AAHN,GAvgBI;AA4gBd4F,EAAAA,QAAQ,EAAE;AACNC,IAAAA,MAAM,EAAE,CAAC,gBAAD,CADF;AAENC,IAAAA,SAAS,EAAE,CACP,oBADO,EAEP;AAAEC,MAAAA,OAAO,EAAE;AAAE,wBAAgB;AAAlB;AAAX,KAFO;AAFL,GA5gBI;AAmhBdC,EAAAA,IAAI,EAAE;AACFzH,IAAAA,GAAG,EAAE,CAAC,WAAD,CADH;AAEF0H,IAAAA,UAAU,EAAE,CAAC,cAAD,CAFV;AAGFC,IAAAA,MAAM,EAAE,CAAC,UAAD,CAHN;AAIFC,IAAAA,IAAI,EAAE,CAAC,OAAD;AAJJ,GAnhBQ;AAyhBdC,EAAAA,UAAU,EAAE;AACRC,IAAAA,YAAY,EAAE,CAAC,qCAAD,CADN;AAERC,IAAAA,iCAAiC,EAAE,CAC/B,gDAD+B,EAE/B;AAAE3K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF+B,CAF3B;AAMR2K,IAAAA,mBAAmB,EAAE,CACjB,sDADiB,EAEjB;AAAE5K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFiB,CANb;AAUR4K,IAAAA,qBAAqB,EAAE,CACnB,mDADmB,EAEnB;AAAE7K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFmB,CAVf;AAcR6K,IAAAA,8BAA8B,EAAE,CAC5B,6CAD4B,EAE5B;AAAE9K,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF4B,CAdxB;AAkBR8K,IAAAA,gBAAgB,EAAE,CAAC,0CAAD,CAlBV;AAmBRC,IAAAA,eAAe,EAAE,CAAC,kCAAD,CAnBT;AAoBRC,IAAAA,aAAa,EAAE,CAAC,8CAAD,CApBP;AAqBRC,IAAAA,6BAA6B,EAAE,CAC3B,qCAD2B,EAE3B;AAAElL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF2B,CArBvB;AAyBRkL,IAAAA,eAAe,EAAE,CACb,2CADa,EAEb;AAAEnL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFa,CAzBT;AA6BR+I,IAAAA,wBAAwB,EAAE,CACtB,sBADsB,EAEtB;AAAEhJ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFsB,CA7BlB;AAiCRgJ,IAAAA,UAAU,EAAE,CACR,4BADQ,EAER;AAAEjJ,M
AAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFQ,CAjCJ;AAqCRmL,IAAAA,eAAe,EAAE,CACb,wDADa,EAEb;AAAEpL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFa,CArCT;AAyCRoL,IAAAA,gBAAgB,EAAE,CACd,kDADc,EAEd;AAAErL,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFc,CAzCV;AA6CRqL,IAAAA,eAAe,EAAE,CAAC,wDAAD,CA7CT;AA8CRC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CA9CV;AA+CRC,IAAAA,yBAAyB,EAAE,CAAC,uBAAD,CA/CnB;AAgDRC,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAhDL;AAiDRC,IAAAA,WAAW,EAAE,CAAC,kCAAD,CAjDL;AAkDRC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,EAE5B;AAAE3L,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAF4B,CAlDxB;AAsDR2L,IAAAA,gBAAgB,EAAE,CACd,qEADc,EAEd;AAAE5L,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,WAAD;AAAZ;AAAb,KAFc,CAtDV;AA0DR4L,IAAAA,YAAY,EAAE,CAAC,oCAAD;AA1DN,GAzhBE;AAqlBdC,EAAAA,IAAI,EAAE;AACFC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CADT;AAEFC,IAAAA,gBAAgB,EAAE,CAAC,gDAAD,CAFhB;AAGFC,IAAAA,gBAAgB,EAAE,CAAC,mCAAD,CAHhB;AAIFC,IAAAA,sBAAsB,EAAE,CAAC,oCAAD,CAJtB;AAKFC,IAAAA,4BAA4B,EAAE,CAAC,2CAAD,CAL5B;AAMFC,IAAAA,kCAAkC,EAAE,CAChC,kDADgC,CANlC;AASFC,IAAAA,gBAAgB,EAAE,CAAC,8BAAD,CAThB;AAUFC,IAAAA,aAAa,EAAE,CAAC,wBAAD,CAVb;AAWFC,IAAAA,aAAa,EAAE,CAAC,oCAAD,CAXb;AAYF3J,IAAAA,GAAG,EAAE,CAAC,iBAAD,CAZH;AAaF4J,IAAAA,iCAAiC,EAAE,CAAC,kCAAD,CAbjC;AAcFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAdpB;AAeFC,IAAAA,UAAU,EAAE,CAAC,iCAAD,CAfV;AAgBFC,IAAAA,sBAAsB,EAAE,CAAC,wCAAD,CAhBtB;AAiBFnH,IAAAA,IAAI,EAAE,CAAC,oBAAD,CAjBJ;AAkBFoH,IAAAA,oBAAoB,EAAE,CAAC,+BAAD,CAlBpB;AAmBFC,IAAAA,gBAAgB,EAAE,CAAC,wBAAD,CAnBhB;AAoBFC,IAAAA,qBAAqB,EAAE,CAAC,oCAAD,CApBrB;AAqBF9D,IAAAA,wBAAwB,EAAE,CAAC,gBAAD,CArBxB;AAsBFrD,IAAAA,WAAW,EAAE,CAAC,4BAAD,CAtBX;AAuBFoH,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CAvBnB;AAwBFC,IAAAA,WAAW,EAAE,CAAC,yBAAD,CAxBX;AAyBFC,IAAAA,mCAAmC,EAAE,CAAC,4BAAD,CAzBnC;AA0BFC,IAAAA,wBAAwB,EAAE,CAAC,uCAAD,CA1BxB;AA2BFC,IAAAA,sBAAsB,EAAE,CAAC,6BAAD,CA3BtB;AA4BFC,IAAAA,iBAAiB,EAAE,CAAC,gCAAD,CA5BjB;AA6BFC,IAAAA,YAAY,EAAE,CAAC,uBAAD,CA7BZ;AA8BFC,IAAAA,WAAW,EAAE,CAAC,wCAAD,CA9BX;AA+BFC,IAAAA,YAAY,EAAE,CAAC,uCAAD,CA/BZ;AAgCFC,IAAAA,uBAAuB,EAAE,CAAC,2CAAD,CAhCvB;AAiCFC,IAAAA,yBAAyB,EAAE,CACvB,qDADuB,CAjCzB;AAoCFC,IAAAA,0CAA0C,EAAE,CACxC,8CADwC,CApC1C;AAuCFC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAvCpB;AAwCFC,IAAAA,uCAAuC,EAAE,CACrC,2CADqC,CAxCvC;AA2CFC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CA3CX;AA4CFzK,IAAAA,MAAM,EAAE,CAAC,mBAAD,CA5CN;AA6CF0K,IAAAA,oCAAoC,EAAE,CAClC,oCADkC,CA7CpC;AAgDFC,IAAAA,aAAa,EAAE,CAAC,mCAAD,CAhDb;AAiDFC,IAAAA,yBAAyB,EAAE,CAAC,0CAAD;AAjDzB,GArlBQ;AAwoBdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,iCAAiC,EAAE,CAC/B,qDAD+B,CAD7B;AAINC,IAAAA,mBAAmB,EAAE,CACjB,2DADiB,CAJf;AAONC,IAAAA,wCAAwC,EAAE,CACtC,mFADsC,CAPpC;AAUNC,IAAAA,0BAA0B,EAAE,CACxB,yFADwB,CAVtB;AAaNC,IAAAA,4CAA4C,EAAE,CAC1C,iEAD0C,EAE1C,EAF0C,EAG1C;AAAE5S,MAAAA,OAAO,EAAE,CAAC,UAAD,EAAa,2CAAb;AAAX,KAH0C,CAbxC;AAkBN6S,IAAAA,2DAA2D,EAAE,CACzD,2DADyD,EAEzD,EAFyD,EAGzD;AACI7S,MAAAA,OAAO,EAAE,CACL,UADK,EAEL,yDAFK;AADb,KAHyD,CAlBvD;AA4BN8S,IAAAA,uDAAuD,EAAE,CACrD,2DADqD,CA5BnD;AA+BNC,IAAAA,yCAAyC,EAAE,CACvC,iEADuC,CA/BrC;AAkCNC,IAAAA,0CAA0C,EAAE,CACxC,uEADwC,CAlCtC;AAqCNC,IAAAA,8BAA8B,EAAE,CAC5B,kDAD4B,CArC1B;AAwCNC,IAAAA,yBAAyB,EAAE,CACvB,wDADuB,CAxCrB;AA2CNC,IAAAA,iBAAiB,EAAE,CACf,8DADe,CA3Cb;AA8CNC,IAAAA,qCAAqC,EAAE,CACnC,gFADmC,CA9CjC;AAiDNC,IAAAA,gCAAgC,EAAE,CAC9B,sFAD8B,CAjD5B;AAoDNC,IAAAA,wBAAwB,EAAE,CACtB,4FADsB,CApDpB;AAuDNC,IAAAA,kCAAkC,EAAE,CAChC,mEADgC,CAvD9B;AA0DNC,IAAAA,oBAAoB,EAAE,CAClB,yEADkB,CA1DhB;AA6DNC,IAAAA,yCAAyC,EAAE,CACvC,yFADuC,CA7DrC;AAgENC,IAAAA,2BAA2B,EAAE,CACzB,+FADyB;AAhEvB,GAxoBI;AA4sBdC,EAAAA,QAAQ,EAAE;AACNC,IAAAA,eAAe,EAAE,CACb,qDADa,EAEb;AAAEtP,MAAAA,SAAS,
EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFa,CADX;AAKNsP,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAEvP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CALN;AASNuP,IAAAA,YAAY,EAAE,CACV,qCADU,EAEV;AAAExP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CATR;AAaNwP,IAAAA,0BAA0B,EAAE,CACxB,qBADwB,EAExB;AAAEzP,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFwB,CAbtB;AAiBNyP,IAAAA,YAAY,EAAE,CACV,2BADU,EAEV;AAAE1P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CAjBR;AAqBN0P,IAAAA,aAAa,EAAE,CACX,qCADW,EAEX;AAAE3P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFW,CArBT;AAyBNkF,IAAAA,MAAM,EAAE,CACJ,+BADI,EAEJ;AAAEnF,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFI,CAzBF;AA6BN2P,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAE5P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CA7BN;AAiCN4P,IAAAA,YAAY,EAAE,CACV,sCADU,EAEV;AAAE7P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU,CAjCR;AAqCN2C,IAAAA,GAAG,EAAE,CACD,4BADC,EAED;AAAE5C,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFC,CArCC;AAyCN6P,IAAAA,OAAO,EAAE,CACL,uCADK,EAEL;AAAE9P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFK,CAzCH;AA6CN8P,IAAAA,SAAS,EAAE,CACP,mCADO,EAEP;AAAE/P,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFO,CA7CL;AAiDN+P,IAAAA,oBAAoB,EAAE,CAClB,gEADkB,EAElB;AAAEhQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFkB,CAjDhB;AAqDNgQ,IAAAA,SAAS,EAAE,CACP,yCADO,EAEP;AAAEjQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFO,CArDL;AAyDNiQ,IAAAA,iBAAiB,EAAE,CACf,0CADe,EAEf;AAAElQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFe,CAzDb;AA6DNkQ,IAAAA,WAAW,EAAE,CACT,oCADS,EAET;AAAEnQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CA7DP;AAiENgJ,IAAAA,UAAU,EAAE,CACR,0BADQ,EAER;AAAEjJ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CAjEN;AAqENiJ,IAAAA,WAAW,EAAE,CACT,oCADS,EAET;AAAElJ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CArEP;AAyEN0F,IAAAA,WAAW,EAAE,CACT,gCADS,EAET;AAAE3F,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFS,CAzEP;AA6ENmQ,IAAAA,QAAQ,EAAE,CACN,8CADM,EAEN;AAAEpQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFM,CA7EJ;AAiFNoQ,IAAAA,UAAU,EAAE,CACR,0CADQ,EAER;AAAErQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CAjFN;AAqFNqQ,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,EAEhB;AAAEtQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFgB,CArFd;AAyFNmD,IAAAA,MAAM,EAAE,CACJ,8BADI,EAEJ;AAAEpD,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFI,CAzFF;AA6FNsQ,IAAAA,UAAU,EAAE,CACR,yCADQ,EAER;AAAEvQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFQ,CA7FN;AAiGNuQ,IAAAA,YAAY,EAAE,CACV,qCADU,EAEV;AAAExQ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFU;AAjGR,GA5sBI;AAkzBdwQ,EAAAA,KAAK,EAAE;AACHC,IAAAA,aAAa,EAAE,CAAC,qDAAD,CADZ;AAEHhO,IAAAA,MAAM,EAAE,CAAC,kCAAD,CAFL;AAGHiO,IAAAA,2BAA2B,EAAE,CACzB,8EADyB,CAH1B;AAMHC,IAAAA,YAAY,EAAE,CAAC,wDAAD,CANX;AAOHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB,CAPlB;AAUHC,IAAAA,mBAAmB,EAAE,CACjB,sEADiB,CAVlB;AAaHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CAblB;AAgBHC,IAAAA,aAAa,EAAE,CACX,8EADW,CAhBZ;AAmBHpO,IAAAA,GAAG,EAAE,CAAC,+CAAD,CAnBF;AAoBHqO,IAAAA,SAAS,EAAE,CACP,mEADO,CApBR;AAuBHC,IAAAA,gBAAgB,EAAE,CAAC,uDAAD,CAvBf;AAwBH1L,IAAAA,IAAI,EAAE,CAAC,iCAAD,CAxBH;AAyBH2L,IAAAA,qBAAqB,EAAE,CACnB,4EADmB,CAzBpB;AA4BHzL,IAAAA,WAAW,EAAE,CAAC,uDAAD,CA5BV;AA6BH0L,IAAAA,SAAS,EAAE,CAAC,qDAAD,CA7BR;AA8BHC,IAAAA,sBAAsB,EAAE,CACpB,mEADoB,CA9BrB;
AAiCHC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CAjCjB;AAoCHC,IAAAA,yBAAyB,EAAE,CAAC,0CAAD,CApCxB;AAqCHC,IAAAA,WAAW,EAAE,CAAC,uDAAD,CArCV;AAsCHC,IAAAA,KAAK,EAAE,CAAC,qDAAD,CAtCJ;AAuCHC,IAAAA,wBAAwB,EAAE,CACtB,sEADsB,CAvCvB;AA0CHC,IAAAA,gBAAgB,EAAE,CACd,oEADc,CA1Cf;AA6CHC,IAAAA,YAAY,EAAE,CACV,2EADU,CA7CX;AAgDHxO,IAAAA,MAAM,EAAE,CAAC,iDAAD,CAhDL;AAiDHyO,IAAAA,YAAY,EAAE,CACV,6DADU,EAEV;AAAE7R,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFU,CAjDX;AAqDH6R,IAAAA,YAAY,EAAE,CACV,mEADU,CArDX;AAwDHC,IAAAA,mBAAmB,EAAE,CACjB,yDADiB;AAxDlB,GAlzBO;AA82BdC,EAAAA,SAAS,EAAE;AAAEpP,IAAAA,GAAG,EAAE,CAAC,iBAAD;AAAP,GA92BG;AA+2BdqP,EAAAA,SAAS,EAAE;AACPC,IAAAA,sBAAsB,EAAE,CACpB,4DADoB,EAEpB;AAAElS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFoB,CADjB;AAKPkS,IAAAA,cAAc,EAAE,CACZ,4DADY,EAEZ;AAAEnS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CALT;AASPmS,IAAAA,qBAAqB,EAAE,CACnB,mEADmB,EAEnB;AAAEpS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFmB,CAThB;AAaPoS,IAAAA,iCAAiC,EAAE,CAC/B,kEAD+B,EAE/B;AAAErS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF+B,CAb5B;AAiBPqS,IAAAA,gBAAgB,EAAE,CACd,4DADc,EAEd;AAAEtS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFc,CAjBX;AAqBPsS,IAAAA,mCAAmC,EAAE,CACjC,wGADiC,EAEjC;AAAEvS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFiC,CArB9B;AAyBPuS,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B;AAAExS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF0B,CAzBvB;AA6BPwS,IAAAA,sBAAsB,EAAE,CACpB,4EADoB,EAEpB;AAAEzS,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFoB,CA7BjB;AAiCPyS,IAAAA,cAAc,EAAE,CACZ,4EADY,EAEZ;AAAE1S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFY,CAjCT;AAqCP0S,IAAAA,qBAAqB,EAAE,CACnB,mFADmB,EAEnB;AAAE3S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFmB,CArChB;AAyCP2S,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,EAEzB;AAAE5S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFyB,CAzCtB;AA6CP4S,IAAAA,uBAAuB,EAAE,CACrB,8FADqB,EAErB;AAAE7S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFqB,CA7ClB;AAiDP6S,IAAAA,8BAA8B,EAAE,CAC5B,wHAD4B,EAE5B;AAAE9S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF4B,CAjDzB;AAqDP8S,IAAAA,YAAY,EAAE,CACV,iCADU,EAEV;AAAE/S,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFU,EAGV;AACI+S,MAAAA,UAAU,EAAE;AADhB,KAHU,CArDP;AA4DPC,IAAAA,oBAAoB,EAAE,CAClB,2DADkB,EAElB;AAAEjT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFkB,CA5Df;AAgEPiT,IAAAA,YAAY,EAAE,CACV,2DADU,EAEV;AAAElT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFU,CAhEP;AAoEPkT,IAAAA,mBAAmB,EAAE,CACjB,kEADiB,EAEjB;AAAEnT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFiB,CApEd;AAwEPmT,IAAAA,+BAA+B,EAAE,CAC7B,iEAD6B,EAE7B;AAAEpT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF6B,CAxE1B;AA4EPoT,IAAAA,iCAAiC,EAAE,CAC/B,uGAD+B,EAE/B;AAAErT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAF+B,CA5E5B;AAgFPqT,IAAAA,0BAA0B,EAAE,CACxB,6EADwB,EAExB;AAAEtT,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,eAAD;AAAZ;AAAb,KAFwB;AAhFrB,GA/2BG;AAo8BdsT,EAAAA,KAAK,EAAE;AACHC,IAAAA,gBAAgB,EAAE,CAAC,oDAAD,CADf;AAEHC,IAAAA,wBAAwB,EAAE,CACtB,2EADsB,EAEtB,EAFsB,EAGtB;AAAEC,MAAAA,SAAS,EAAE;AAAb,KAHsB,CAFvB;AAOHpE,IAAAA,eAAe,EAAE,CAAC,oDAAD,CAPd;AAQHqE,IAAAA,sBAAsB,EAAE,CACpB,yFADoB,EAEpB,EAFoB,EAGpB;AAAED,MAAAA,SAAS,EAAE;AAAb,KAHoB,CARrB;AAaHE,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEF,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAbxB;AAkBHG,IAAAA,yBAAyB,EAAE,CACvB,4EADuB,EAEvB,EAFuB,EAGvB;AAAEH,MAAAA
,SAAS,EAAE;AAAb,KAHuB,CAlBxB;AAuBHI,IAAAA,iBAAiB,EAAE,CAAC,oDAAD,CAvBhB;AAwBHC,IAAAA,wBAAwB,EAAE,CACtB,gDADsB,EAEtB;AAAE/T,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFsB,CAxBvB;AA4BH+T,IAAAA,cAAc,EAAE,CAAC,mDAAD,CA5Bb;AA6BHC,IAAAA,0BAA0B,EAAE,CACxB,8CADwB,CA7BzB;AAgCHC,IAAAA,mBAAmB,EAAE,CACjB,0DADiB,CAhClB;AAmCHC,IAAAA,+BAA+B,EAAE,CAC7B,6EAD6B,EAE7B;AAAEnU,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF6B,CAnC9B;AAuCHmU,IAAAA,kBAAkB,EAAE,CAAC,2CAAD,CAvCjB;AAwCHC,IAAAA,eAAe,EAAE,CAAC,iCAAD,CAxCd;AAyCHC,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAzCf;AA0CHC,IAAAA,sBAAsB,EAAE,CACpB,iEADoB,CA1CrB;AA6CHC,IAAAA,mBAAmB,EAAE,CAAC,uCAAD,CA7ClB;AA8CH/E,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CA9CzB;AA+CHgF,IAAAA,UAAU,EAAE,CAAC,kCAAD,CA/CT;AAgDHC,IAAAA,WAAW,EAAE,CAAC,wBAAD,CAhDV;AAiDHC,IAAAA,yBAAyB,EAAE,CACvB,2DADuB,CAjDxB;AAoDHC,IAAAA,0BAA0B,EAAE,CAAC,2CAAD,CApDzB;AAqDHC,IAAAA,eAAe,EAAE,CACb,kCADa,EAEb;AAAE7U,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,YAAD;AAAZ;AAAb,KAFa,CArDd;AAyDH6U,IAAAA,aAAa,EAAE,CAAC,qCAAD,CAzDZ;AA0DHC,IAAAA,mBAAmB,EAAE,CACjB,uDADiB,EAEjB;AAAE/U,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,UAAD;AAAZ;AAAb,KAFiB,CA1DlB;AA8DHqM,IAAAA,aAAa,EAAE,CAAC,kCAAD,CA9DZ;AA+DH0I,IAAAA,iBAAiB,EAAE,CAAC,qDAAD,CA/DhB;AAgEH7P,IAAAA,MAAM,EAAE,CAAC,8BAAD,CAhEL;AAiEH8P,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CAjEvB;AAoEHC,IAAAA,2BAA2B,EAAE,CACzB,0EADyB,CApE1B;AAuEHC,IAAAA,mBAAmB,EAAE,CACjB,8DADiB,CAvElB;AA0EHC,IAAAA,sBAAsB,EAAE,CACpB,2DADoB,CA1ErB;AA6EHC,IAAAA,mBAAmB,EAAE,CAAC,oDAAD,CA7ElB;AA8EHC,IAAAA,+BAA+B,EAAE,CAC7B,+EAD6B,EAE7B;AAAEtV,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF6B,CA9E9B;AAkFHsV,IAAAA,eAAe,EAAE,CAAC,4CAAD,CAlFd;AAmFHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CAnFf;AAsFHC,IAAAA,UAAU,EAAE,CAAC,8CAAD,CAtFT;AAuFHC,IAAAA,gBAAgB,EAAE,CACd,0DADc,CAvFf;AA0FHC,IAAAA,eAAe,EAAE,CACb,oCADa,EAEb;AAAE3V,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,YAAD;AAAZ;AAAb,KAFa,CA1Fd;AA8FH2V,IAAAA,iCAAiC,EAAE,CAC/B,yFAD+B,CA9FhC;AAiGHC,IAAAA,aAAa,EAAE,CAAC,oDAAD,CAjGZ;AAkGHC,IAAAA,kBAAkB,EAAE,CAChB,yDADgB,CAlGjB;AAqGHvJ,IAAAA,aAAa,EAAE,CAAC,8CAAD,CArGZ;AAsGHwJ,IAAAA,6BAA6B,EAAE,CAC3B,uDAD2B,EAE3B;AAAE/V,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAF2B,CAtG5B;AA0GH+V,IAAAA,0BAA0B,EAAE,CACxB,mDADwB,EAExB;AAAEhW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFwB,CA1GzB;AA8GHgW,IAAAA,eAAe,EAAE,CACb,yCADa,EAEb,EAFa,EAGb;AAAEva,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,wBAAV;AAAX,KAHa,CA9Gd;AAmHHwa,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CAnHrB;AAoHHC,IAAAA,sBAAsB,EAAE,CAAC,yCAAD,CApHrB;AAqHHC,IAAAA,4BAA4B,EAAE,CAC1B,oDAD0B,EAE1B;AAAEpW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAF0B,CArH3B;AAyHHoW,IAAAA,yBAAyB,EAAE,CACvB,gDADuB,EAEvB;AAAErW,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,QAAD;AAAZ;AAAb,KAFuB,CAzHxB;AA6HH2C,IAAAA,GAAG,EAAE,CAAC,2BAAD,CA7HF;AA8HH0T,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CA9HpB;AAiIHC,IAAAA,wBAAwB,EAAE,CACtB,uEADsB,CAjIvB;AAoIHC,IAAAA,kBAAkB,EAAE,CAAC,wCAAD,CApIjB;AAqIHC,IAAAA,yBAAyB,EAAE,CACvB,wFADuB,CArIxB;AAwIHC,IAAAA,YAAY,EAAE,CACV,kCADU,EAEV;AAAE1W,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFU,CAxIX;AA4IH0W,IAAAA,kCAAkC,EAAE,CAChC,0EADgC,CA5IjC;AA+IHC,IAAAA,SAAS,EAAE,CAAC,6CAAD,CA/IR;AAgJHC,IAAAA,mBAAmB,EAAE,CACjB,wDADiB,CAhJlB;AAmJHC,IAAAA,SAAS,EAAE,CAAC,0CAAD,CAnJR;AAoJHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CApJpB;AAqJHC,IAAAA,8BAA8B,EAAE,CAC5B,+DAD4B,CArJ7B;AAwJHC,IAAAA,uBAAuB,EAAE,CAAC,gDAAD,CAxJtB;AAyJHvQ,IAAAA,SAAS,EAAE,CAAC,yCAAD,CAzJR;AA0JHwQ,IAAAA,sBAAsB,EAAE,CAAC,iDAAD,CA1JrB;AA2JHC,IAAAA,gBAAgB,EAAE,CAAC,iDAAD,CA3Jf;AA4JHC,IAAAA,4BAA4B,EAAE,CA
C1B,4EAD0B,EAE1B;AAAEpX,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAF0B,CA5J3B;AAgKHoX,IAAAA,0BAA0B,EAAE,CAAC,6CAAD,CAhKzB;AAiKHC,IAAAA,UAAU,EAAE,CAAC,2CAAD,CAjKT;AAkKHC,IAAAA,oBAAoB,EAAE,CAAC,8CAAD,CAlKnB;AAmKHC,IAAAA,YAAY,EAAE,CAAC,yCAAD,CAnKX;AAoKHC,IAAAA,aAAa,EAAE,CAAC,uDAAD,CApKZ;AAqKHC,IAAAA,mBAAmB,EAAE,CACjB,4EADiB,CArKlB;AAwKHC,IAAAA,cAAc,EAAE,CACZ,2DADY,CAxKb;AA2KHC,IAAAA,mBAAmB,EAAE,CAAC,+CAAD,CA3KlB;AA4KHC,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CA5Kf;AA6KHC,IAAAA,QAAQ,EAAE,CAAC,iCAAD,CA7KP;AA8KHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CA9KZ;AA+KHC,IAAAA,mBAAmB,EAAE,CAAC,wCAAD,CA/KlB;AAgLHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CAhLpB;AAiLHC,IAAAA,8BAA8B,EAAE,CAC5B,sFAD4B,CAjL7B;AAoLHC,IAAAA,iBAAiB,EAAE,CAAC,4CAAD,CApLhB;AAqLHC,IAAAA,SAAS,EAAE,CAAC,kCAAD,CArLR;AAsLHC,IAAAA,oBAAoB,EAAE,CAAC,wCAAD,CAtLnB;AAuLHC,IAAAA,UAAU,EAAE,CAAC,iDAAD,CAvLT;AAwLHC,IAAAA,eAAe,EAAE,CAAC,sDAAD,CAxLd;AAyLHC,IAAAA,eAAe,EAAE,CAAC,+CAAD,CAzLd;AA0LHC,IAAAA,yBAAyB,EAAE,CACvB,+EADuB,CA1LxB;AA6LHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CA7LlC;AAgMHC,IAAAA,WAAW,EAAE,CAAC,iDAAD,CAhMV;AAiMHC,IAAAA,eAAe,EAAE,CAAC,qDAAD,CAjMd;AAkMHC,IAAAA,mCAAmC,EAAE,CACjC,2EADiC,CAlMlC;AAqMHC,IAAAA,QAAQ,EAAE,CAAC,yCAAD,CArMP;AAsMHpM,IAAAA,UAAU,EAAE,CAAC,2CAAD,CAtMT;AAuMHqM,IAAAA,uBAAuB,EAAE,CACrB,kDADqB,CAvMtB;AA0MHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CA1MX;AA2MHC,IAAAA,yBAAyB,EAAE,CACvB,oEADuB,EAEvB;AAAEjZ,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFuB,CA3MxB;AA+MHiQ,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CA/MhB;AAgNHgJ,IAAAA,qBAAqB,EAAE,CACnB,yDADmB,CAhNpB;AAmNHC,IAAAA,yBAAyB,EAAE,CAAC,oCAAD,CAnNxB;AAoNHC,IAAAA,wBAAwB,EAAE,CACtB,kDADsB,CApNvB;AAuNH1T,IAAAA,WAAW,EAAE,CAAC,mCAAD,CAvNV;AAwNH2T,IAAAA,gBAAgB,EAAE,CAAC,wCAAD,CAxNf;AAyNHC,IAAAA,cAAc,EAAE,CAAC,gCAAD,CAzNb;AA0NHC,IAAAA,sBAAsB,EAAE,CACpB,gEADoB,CA1NrB;AA6NHC,IAAAA,eAAe,EAAE,CAAC,uCAAD,CA7Nd;AA8NHxQ,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA9NvB;AA+NHC,IAAAA,UAAU,EAAE,CAAC,uBAAD,CA/NT;AAgOHtD,IAAAA,WAAW,EAAE,CAAC,6BAAD,CAhOV;AAiOHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CAjOR;AAkOH6T,IAAAA,eAAe,EAAE,CAAC,uCAAD,CAlOd;AAmOHC,IAAAA,mCAAmC,EAAE,CAAC,kCAAD,CAnOlC;AAoOHC,IAAAA,aAAa,EAAE,CAAC,qCAAD,CApOZ;AAqOHC,IAAAA,eAAe,EAAE,CAAC,wCAAD,CArOd;AAsOH/T,IAAAA,UAAU,EAAE,CAAC,mBAAD,CAtOT;AAuOHgU,IAAAA,oCAAoC,EAAE,CAClC,sDADkC,EAElC;AAAE7Z,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFkC,CAvOnC;AA2OH6Z,IAAAA,iBAAiB,EAAE,CACf,wDADe,CA3OhB;AA8OHC,IAAAA,YAAY,EAAE,CAAC,oCAAD,CA9OX;AA+OHC,IAAAA,QAAQ,EAAE,CAAC,gCAAD,CA/OP;AAgPHC,IAAAA,SAAS,EAAE,CAAC,iCAAD,CAhPR;AAiPH5M,IAAAA,YAAY,EAAE,CAAC,iCAAD,CAjPX;AAkPHoE,IAAAA,KAAK,EAAE,CAAC,mCAAD,CAlPJ;AAmPHnE,IAAAA,WAAW,EAAE,CAAC,kDAAD,CAnPV;AAoPH4M,IAAAA,2BAA2B,EAAE,CACzB,6EADyB,EAEzB,EAFyB,EAGzB;AAAExG,MAAAA,SAAS,EAAE;AAAb,KAHyB,CApP1B;AAyPHpD,IAAAA,kBAAkB,EAAE,CAChB,uDADgB,CAzPjB;AA4PH6J,IAAAA,yBAAyB,EAAE,CACvB,2FADuB,EAEvB,EAFuB,EAGvB;AAAEzG,MAAAA,SAAS,EAAE;AAAb,KAHuB,CA5PxB;AAiQH0G,IAAAA,2BAA2B,EAAE,CACzB,kFADyB,CAjQ1B;AAoQHC,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAE3G,MAAAA,SAAS,EAAE;AAAb,KAH0B,CApQ3B;AAyQH4G,IAAAA,4BAA4B,EAAE,CAC1B,8EAD0B,EAE1B,EAF0B,EAG1B;AAAE5G,MAAAA,SAAS,EAAE;AAAb,KAH0B,CAzQ3B;AA8QH6G,IAAAA,YAAY,EAAE,CAAC,qDAAD,CA9QX;AA+QHC,IAAAA,gBAAgB,EAAE,CACd,kCADc,EAEd;AAAExa,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAFc,CA/Qf;AAmRHwa,IAAAA,iBAAiB,EAAE,CAAC,yCAAD,CAnRhB;AAoRHC,IAAAA,wBAAwB,EAAE,CACtB,wEADsB,CApRvB;AAuRHC,IAAAA,wBAAwB,EAAE,CACtB,0EADsB,EAEtB,EAFsB,EAGtB;AAAEjH,MAAAA,SAAS,EAAE;AAAb,KAHsB,CAvRvB;AA4RHkH,IAAAA,sBAAsB,EAAE,CACpB,wFADoB,EAEpB,EAFoB,EAGpB;AAAElH,MAAAA,SAAS,EAAE;AAAb,KAHoB,CA5RrB;AAiSHmH,IAAAA,yBAAyB,EAAE,CACv
B,2EADuB,EAEvB,EAFuB,EAGvB;AAAEnH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAjSxB;AAsSHoH,IAAAA,yBAAyB,EAAE,CACvB,2EADuB,EAEvB,EAFuB,EAGvB;AAAEpH,MAAAA,SAAS,EAAE;AAAb,KAHuB,CAtSxB;AA2SHqH,IAAAA,eAAe,EAAE,CAAC,kDAAD,CA3Sd;AA4SHC,IAAAA,QAAQ,EAAE,CAAC,qCAAD,CA5SP;AA6SH5X,IAAAA,MAAM,EAAE,CAAC,6BAAD,CA7SL;AA8SH6X,IAAAA,sBAAsB,EAAE,CACpB,wDADoB,CA9SrB;AAiTHC,IAAAA,mBAAmB,EAAE,CAAC,mDAAD,CAjTlB;AAkTHC,IAAAA,+BAA+B,EAAE,CAAC,iCAAD,CAlT9B;AAmTHC,IAAAA,gBAAgB,EAAE,CACd,yDADc,CAnTf;AAsTHC,IAAAA,iCAAiC,EAAE,CAC/B,wFAD+B,CAtThC;AAyTHC,IAAAA,aAAa,EAAE,CAAC,mDAAD,CAzTZ;AA0THC,IAAAA,kBAAkB,EAAE,CAChB,wDADgB,CA1TjB;AA6THC,IAAAA,0BAA0B,EAAE,CACxB,iFADwB,EAExB,EAFwB,EAGxB;AAAE9f,MAAAA,OAAO,EAAE,CAAC,OAAD,EAAU,6BAAV;AAAX,KAHwB,CA7TzB;AAkUH+f,IAAAA,2BAA2B,EAAE,CACzB,iFADyB,CAlU1B;AAqUH1N,IAAAA,aAAa,EAAE,CAAC,6CAAD,CArUZ;AAsUH2N,IAAAA,0BAA0B,EAAE,CACxB,oDADwB,CAtUzB;AAyUHC,IAAAA,kBAAkB,EAAE,CAChB,sEADgB,EAEhB;AAAEC,MAAAA,OAAO,EAAE;AAAX,KAFgB;AAzUjB,GAp8BO;AAkxCdC,EAAAA,MAAM,EAAE;AACJC,IAAAA,IAAI,EAAE,CAAC,kBAAD,CADF;AAEJC,IAAAA,OAAO,EAAE,CAAC,qBAAD,EAAwB;AAAE/b,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAAxB,CAFL;AAGJ+b,IAAAA,qBAAqB,EAAE,CAAC,oBAAD,CAHnB;AAIJC,IAAAA,MAAM,EAAE,CAAC,oBAAD,CAJJ;AAKJ1I,IAAAA,KAAK,EAAE,CAAC,0BAAD,CALH;AAMJ2I,IAAAA,MAAM,EAAE,CAAC,oBAAD,EAAuB;AAAElc,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,OAAD;AAAZ;AAAb,KAAvB,CANJ;AAOJkc,IAAAA,KAAK,EAAE,CAAC,mBAAD;AAPH,GAlxCM;AA2xCdC,EAAAA,cAAc,EAAE;AACZ7Y,IAAAA,QAAQ,EAAE,CACN,iEADM,CADE;AAIZM,IAAAA,iBAAiB,EAAE,CAAC,kDAAD,CAJP;AAKZG,IAAAA,WAAW,EAAE,CACT,mEADS;AALD,GA3xCF;AAoyCdqY,EAAAA,KAAK,EAAE;AACHC,IAAAA,iCAAiC,EAAE,CAC/B,0DAD+B,CADhC;AAIHC,IAAAA,kCAAkC,EAAE,CAChC,yDADgC,EAEhC;AAAEvc,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFgC,CAJjC;AAQHuc,IAAAA,+BAA+B,EAAE,CAC7B,wDAD6B,CAR9B;AAWHC,IAAAA,+BAA+B,EAAE,CAC7B,yDAD6B,EAE7B;AAAEzc,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAF6B,CAX9B;AAeHyc,IAAAA,4BAA4B,EAAE,CAC1B,wDAD0B,CAf3B;AAkBHha,IAAAA,MAAM,EAAE,CAAC,wBAAD,CAlBL;AAmBHia,IAAAA,4BAA4B,EAAE,CAC1B,6EAD0B,CAnB3B;AAsBHC,IAAAA,qBAAqB,EAAE,CAAC,gDAAD,CAtBpB;AAuBHC,IAAAA,4BAA4B,EAAE,CAC1B,gGAD0B,CAvB3B;AA0BHC,IAAAA,qBAAqB,EAAE,CACnB,sEADmB,CA1BpB;AA6BHC,IAAAA,WAAW,EAAE,CAAC,sCAAD,CA7BV;AA8BHC,IAAAA,SAAS,EAAE,CAAC,mCAAD,CA9BR;AA+BHC,IAAAA,yBAAyB,EAAE,CACvB,6FADuB,CA/BxB;AAkCHC,IAAAA,kBAAkB,EAAE,CAChB,mEADgB,CAlCjB;AAqCHC,IAAAA,yBAAyB,EAAE,CACvB,0DADuB,CArCxB;AAwCH3X,IAAAA,IAAI,EAAE,CAAC,uBAAD,CAxCH;AAyCH4X,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAzCb;AA0CHC,IAAAA,2BAA2B,EAAE,CACzB,4EADyB,CA1C1B;AA6CHC,IAAAA,oBAAoB,EAAE,CAAC,+CAAD,CA7CnB;AA8CHtU,IAAAA,wBAAwB,EAAE,CAAC,iBAAD,CA9CvB;AA+CHuU,IAAAA,gBAAgB,EAAE,CAAC,2CAAD,CA/Cf;AAgDHC,IAAAA,2BAA2B,EAAE,CACzB,+CADyB,CAhD1B;AAmDHC,IAAAA,iBAAiB,EAAE,CACf,4CADe,EAEf;AAAEzd,MAAAA,SAAS,EAAE;AAAEC,QAAAA,QAAQ,EAAE,CAAC,SAAD;AAAZ;AAAb,KAFe,CAnDhB;AAuDHyd,IAAAA,cAAc,EAAE,CAAC,yCAAD,CAvDb;AAwDHC,IAAAA,4BAA4B,EAAE,CAC1B,6DAD0B,CAxD3B;AA2DHC,IAAAA,kBAAkB,EAAE,CAChB,4DADgB,CA3DjB;AA8DHC,IAAAA,eAAe,EAAE,CACb,2DADa,CA9Dd;AAiEHC,IAAAA,4BAA4B,EAAE,CAC1B,+FAD0B,CAjE3B;AAoEHC,IAAAA,qBAAqB,EAAE,CACnB,qEADmB,CApEpB;AAuEHC,IAAAA,WAAW,EAAE,CAAC,qCAAD;AAvEV,GApyCO;AA62Cd7B,EAAAA,KAAK,EAAE;AACH8B,IAAAA,wBAAwB,EAAE,CAAC,mBAAD,CADvB;AAEHC,IAAAA,KAAK,EAAE,CAAC,6BAAD,CAFJ;AAGHC,IAAAA,YAAY,EAAE,CAAC,6BAAD,CAHX;AAIHC,IAAAA,qBAAqB,EAAE,CAAC,+CAAD,CAJpB;AAKHC,IAAAA,oCAAoC,EAAE,CAAC,gCAAD,CALnC;AAMHC,IAAAA,4BAA4B,EAAE,CAAC,qBAAD,CAN3B;AAOHC,IAAAA,kCAAkC,EAAE,CAAC,iBAAD,CAPjC;AAQHC,IAAAA,2BAA2B,EAAE,CAAC,qBAAD,CAR1B;AASHC,IAAAA,4BAA4B,EAAE,CAAC,oCAAD,CAT3B;AAUHC,IAAAA,kCAAkC,EAAE,CAAC,4BAAD,CAVjC;AAWHC,IAAAA,MAAM,EAAE,CAAC
,gCAAD,CAXL;AAYHne,IAAAA,gBAAgB,EAAE,CAAC,WAAD,CAZf;AAaHoe,IAAAA,aAAa,EAAE,CAAC,uBAAD,CAbZ;AAcHC,IAAAA,iBAAiB,EAAE,CAAC,iCAAD,CAdhB;AAeHC,IAAAA,yBAAyB,EAAE,CAAC,iCAAD,CAfxB;AAgBHC,IAAAA,+BAA+B,EAAE,CAAC,yBAAD,CAhB9B;AAiBHvZ,IAAAA,IAAI,EAAE,CAAC,YAAD,CAjBH;AAkBHwZ,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CAlBzB;AAmBHC,IAAAA,0BAA0B,EAAE,CAAC,kBAAD,CAnBzB;AAoBHC,IAAAA,2BAA2B,EAAE,CAAC,qBAAD,CApB1B;AAqBHC,IAAAA,iCAAiC,EAAE,CAAC,qBAAD,CArBhC;AAsBHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CAtBnB;AAuBHC,IAAAA,oBAAoB,EAAE,CAAC,iCAAD,CAvBnB;AAwBHC,IAAAA,2BAA2B,EAAE,CAAC,oBAAD,CAxB1B;AAyBHC,IAAAA,kBAAkB,EAAE,CAAC,gCAAD,CAzBjB;AA0BHC,IAAAA,gCAAgC,EAAE,CAAC,yBAAD,CA1B/B;AA2BHC,IAAAA,qBAAqB,EAAE,CAAC,4BAAD,CA3BpB;AA4BHC,IAAAA,iCAAiC,EAAE,CAAC,gBAAD,CA5BhC;AA6BHC,IAAAA,yCAAyC,EAAE,CAAC,8BAAD,CA7BxC;AA8BHC,IAAAA,OAAO,EAAE,CAAC,gCAAD,CA9BN;AA+BHC,IAAAA,QAAQ,EAAE,CAAC,mCAAD,CA/BP;AAgCHC,IAAAA,mBAAmB,EAAE,CAAC,aAAD;AAhClB;AA72CO,CAAlB;;ACAO,MAAMC,OAAO,GAAG,mBAAhB;;ACAA,SAASC,kBAAT,CAA4BC,OAA5B,EAAqCC,YAArC,EAAmD;AACtD,QAAMC,UAAU,GAAG,EAAnB;;AACA,OAAK,MAAM,CAACC,KAAD,EAAQC,SAAR,CAAX,IAAiCC,MAAM,CAACC,OAAP,CAAeL,YAAf,CAAjC,EAA+D;AAC3D,SAAK,MAAM,CAACM,UAAD,EAAaC,QAAb,CAAX,IAAqCH,MAAM,CAACC,OAAP,CAAeF,SAAf,CAArC,EAAgE;AAC5D,YAAM,CAACK,KAAD,EAAQC,QAAR,EAAkBC,WAAlB,IAAiCH,QAAvC;AACA,YAAM,CAACI,MAAD,EAASC,GAAT,IAAgBJ,KAAK,CAACK,KAAN,CAAY,GAAZ,CAAtB;AACA,YAAMC,gBAAgB,GAAGV,MAAM,CAACW,MAAP,CAAc;AAAEJ,QAAAA,MAAF;AAAUC,QAAAA;AAAV,OAAd,EAA+BH,QAA/B,CAAzB;;AACA,UAAI,CAACR,UAAU,CAACC,KAAD,CAAf,EAAwB;AACpBD,QAAAA,UAAU,CAACC,KAAD,CAAV,GAAoB,EAApB;AACH;;AACD,YAAMc,YAAY,GAAGf,UAAU,CAACC,KAAD,CAA/B;;AACA,UAAIQ,WAAJ,EAAiB;AACbM,QAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BW,QAAQ,CAAClB,OAAD,EAAUG,KAAV,EAAiBI,UAAjB,EAA6BQ,gBAA7B,EAA+CJ,WAA/C,CAAnC;AACA;AACH;;AACDM,MAAAA,YAAY,CAACV,UAAD,CAAZ,GAA2BP,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBK,gBAAzB,CAA3B;AACH;AACJ;;AACD,SAAOb,UAAP;AACH;;AACD,SAASgB,QAAT,CAAkBlB,OAAlB,EAA2BG,KAA3B,EAAkCI,UAAlC,EAA8CG,QAA9C,EAAwDC,WAAxD,EAAqE;AACjE,QAAMS,mBAAmB,GAAGpB,OAAO,CAACmB,OAAR,CAAgBT,QAAhB,CAAyBA,QAAzB,CAA5B;AACA;;AACA,WAASW,eAAT,CAAyB,GAAGC,IAA5B,EAAkC;AAC9B;AACA,QAAIC,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6BhP,KAA7B,CAAmC,GAAG8P,IAAtC,CAAd,CAF8B;;AAI9B,QAAIX,WAAW,CAAClN,SAAhB,EAA2B;AACvB8N,MAAAA,OAAO,GAAGlB,MAAM,CAACW,MAAP,CAAc,EAAd,EAAkBO,OAAlB,EAA2B;AACjCC,QAAAA,IAAI,EAAED,OAAO,CAACZ,WAAW,CAAClN,SAAb,CADoB;AAEjC,SAACkN,WAAW,CAAClN,SAAb,GAAyBgO;AAFQ,OAA3B,CAAV;AAIA,aAAOL,mBAAmB,CAACG,OAAD,CAA1B;AACH;;AACD,QAAIZ,WAAW,CAACllB,OAAhB,EAAyB;AACrB,YAAM,CAACimB,QAAD,EAAWC,aAAX,IAA4BhB,WAAW,CAACllB,OAA9C;AACAukB,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,WAAU1B,KAAM,IAAGI,UAAW,kCAAiCmB,QAAS,IAAGC,aAAc,IAA3G;AACH;;AACD,QAAIhB,WAAW,CAAC5N,UAAhB,EAA4B;AACxBiN,MAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAiBlB,WAAW,CAAC5N,UAA7B;AACH;;AACD,QAAI4N,WAAW,CAACpd,iBAAhB,EAAmC;AAC/B;AACA,YAAMge,OAAO,GAAGH,mBAAmB,CAACZ,QAApB,CAA6BhP,KAA7B,CAAmC,GAAG8P,IAAtC,CAAhB;;AACA,WAAK,MAAM,CAACQ,IAAD,EAAOC,KAAP,CAAX,IAA4B1B,MAAM,CAACC,OAAP,CAAeK,WAAW,CAACpd,iBAA3B,CAA5B,EAA2E;AACvE,YAAIue,IAAI,IAAIP,OAAZ,EAAqB;AACjBvB,UAAAA,OAAO,CAAC4B,GAAR,CAAYC,IAAZ,CAAkB,IAAGC,IAAK,0CAAyC3B,KAAM,IAAGI,UAAW,aAAYwB,KAAM,WAAzG;;AACA,cAAI,EAAEA,KAAK,IAAIR,OAAX,CAAJ,EAAyB;AACrBA,YAAAA,OAAO,CAACQ,KAAD,CAAP,GAAiBR,OAAO,CAACO,IAAD,CAAxB;AACH;;AACD,iBAAOP,OAAO,CAACO,IAAD,CAAd;AACH;AACJ;;AACD,aAAOV,mBAAmB,CAACG,OAAD,CAA1B;AACH,KA/B6B;;;AAiC9B,WAAOH,mBAAmB,CAAC,GAAGE,IAAJ,CAA1B;AACH;;AACD,SAAOjB,MAAM,CAACW,MAAP,CAAcK,eAAd,EAA+BD,mBAA/B,CAAP;AACH;;ACxDM,SAASY,mBAAT,CAA6BhC,OAA7B,EAAsC;AACzC,QAAMiC,GAAG,GAAGlC,kBAAkB,CAACC,OAAD,EAAUkC,SAAV,CAA9B;AACA,SAAO;AACHC,IAAAA,IAAI,EAAEF;AADH,GAAP;AAGH;AACDD,mBAA
mB,CAAClC,OAApB,GAA8BA,OAA9B;AACA,AAAO,SAASsC,yBAAT,CAAmCpC,OAAnC,EAA4C;AAC/C,QAAMiC,GAAG,GAAGlC,kBAAkB,CAACC,OAAD,EAAUkC,SAAV,CAA9B;AACA,2CACOD,GADP;AAEIE,IAAAA,IAAI,EAAEF;AAFV;AAIH;AACDG,yBAAyB,CAACtC,OAA1B,GAAoCA,OAApC;;;;;"}
\ No newline at end of file
diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js
index f53e3bfd..5f781520 100644
--- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js
+++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js
@@ -3,6 +3,9 @@ const Endpoints = {
         addSelectedRepoToOrgSecret: [
             "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}",
         ],
+        approveWorkflowRun: [
+            "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve",
+        ],
         cancelWorkflowRun: [
             "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel",
         ],
@@ -223,6 +226,10 @@ const Endpoints = {
             "POST /content_references/{content_reference_id}/attachments",
             { mediaType: { previews: ["corsair"] } },
         ],
+        createContentAttachmentForRepo: [
+            "POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments",
+            { mediaType: { previews: ["corsair"] } },
+        ],
         createFromManifest: ["POST /app-manifests/{code}/conversions"],
         createInstallationAccessToken: [
             "POST /app/installations/{installation_id}/access_tokens",
@@ -321,9 +328,14 @@ const Endpoints = {
             "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}",
         ],
         getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
+        listAlertInstances: [
+            "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+        ],
         listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
         listAlertsInstances: [
             "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+            {},
+            { renamed: ["codeScanning", "listAlertInstances"] },
         ],
         listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
         updateAlert: [
@@ -882,6 +894,10 @@ const Endpoints = {
             "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
             { mediaType: { previews: ["squirrel-girl"] } },
         ],
+        createForRelease: [
+            "POST /repos/{owner}/{repo}/releases/{release_id}/reactions",
+            { mediaType: { previews: ["squirrel-girl"] } },
+        ],
         createForTeamDiscussionCommentInOrg: [
             "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions",
             { mediaType: { previews: ["squirrel-girl"] } },
@@ -975,6 +991,9 @@ const Endpoints = {
             { mediaType: { previews: ["dorian"] } },
         ],
         compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
+        compareCommitsWithBasehead: [
+            "GET /repos/{owner}/{repo}/compare/{basehead}",
+        ],
         createCommitComment: [
             "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments",
         ],
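For illustration only, not part of the upstream patch: once this plugin release is installed, the route strings added above surface as octokit.rest.* methods. A minimal TypeScript sketch, assuming a token in the GITHUB_TOKEN environment variable and placeholder owner/repo/run_id values:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

// Register the generated endpoint methods on an Octokit core client.
const MyOctokit = Octokit.plugin(restEndpointMethods);
const octokit = new MyOctokit({ auth: process.env.GITHUB_TOKEN });

async function approveRun(): Promise<void> {
  // The new "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" route
  // added above is exposed as actions.approveWorkflowRun().
  await octokit.rest.actions.approveWorkflowRun({
    owner: "octocat", // placeholder
    repo: "hello-world", // placeholder
    run_id: 42, // placeholder
  });
}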
diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js
index 16f5964c..6250d76e 100644
--- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js
+++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js
@@ -1 +1 @@
-export const VERSION = "5.1.1";
+export const VERSION = "5.3.1";
diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts
index 6f3a4a9b..df4fc017 100644
--- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts
+++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts
@@ -12,6 +12,20 @@ export declare type RestEndpointMethods = {
                 url: string;
             }>;
         };
+        /**
+         * **Note:** This endpoint is currently in beta and is subject to change.
+         *
+         * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)."
+         *
+         * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
+         */
+        approveWorkflowRun: {
+            (params?: RestEndpointMethodTypes["actions"]["approveWorkflowRun"]["parameters"]): Promise;
+            defaults: RequestInterface["defaults"];
+            endpoint: EndpointInterface<{
+                url: string;
+            }>;
+        };
         /**
          * Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
          */
@@ -1380,7 +1394,7 @@ export declare type RestEndpointMethods = {
             }>;
         };
         /**
-         * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment.
+         * **Deprecated:** use `apps.createContentAttachmentForRepo()` (`POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments`) instead. Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment.
          *
          * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments.
          *
@@ -1393,6 +1407,20 @@ export declare type RestEndpointMethods = {
                 url: string;
             }>;
         };
+        /**
+         * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` and `repository` `full_name` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment.
+         *
+         * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments.
+         *
+         * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint.
+         */
+        createContentAttachmentForRepo: {
+            (params?: RestEndpointMethodTypes["apps"]["createContentAttachmentForRepo"]["parameters"]): Promise;
+            defaults: RequestInterface["defaults"];
+            endpoint: EndpointInterface<{
+                url: string;
+            }>;
+        };
         /**
          * Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`.
         */
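For illustration only, not part of the upstream patch: a sketch of the repo-scoped replacement for the deprecated apps.createContentAttachment() call described above. The owner, repo, and content_reference_id values are hypothetical and would normally come from a content_reference webhook payload, and the call assumes an installation access token; the "corsair" preview declared in the route definition is applied by the plugin automatically.

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

async function attachContent(): Promise<void> {
  // Targets the new POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments route.
  await octokit.rest.apps.createContentAttachmentForRepo({
    owner: "octocat", // placeholder
    repo: "hello-world", // placeholder
    content_reference_id: 17, // placeholder, taken from the webhook payload
    title: "Linked details",
    body: "Markdown body rendered under the content reference.",
  });
}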
@@ -2109,6 +2137,16 @@ export declare type RestEndpointMethods = {
                 url: string;
             }>;
         };
+        /**
+         * Lists all instances of the specified code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint.
+         */
+        listAlertInstances: {
+            (params?: RestEndpointMethodTypes["codeScanning"]["listAlertInstances"]["parameters"]): Promise;
+            defaults: RequestInterface["defaults"];
+            endpoint: EndpointInterface<{
+                url: string;
+            }>;
+        };
         /**
          * Lists all open code scanning alerts for the default branch (usually `main`
          * or `master`). You must use an access token with the `security_events` scope to use
@@ -2129,6 +2167,7 @@ export declare type RestEndpointMethods = {
         };
         /**
          * Lists all instances of the specified code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint.
+         * @deprecated octokit.rest.codeScanning.listAlertsInstances() has been renamed to octokit.rest.codeScanning.listAlertInstances() (2021-04-30)
          */
         listAlertsInstances: {
             (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsInstances"]["parameters"]): Promise;
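For illustration only, not part of the upstream patch: the rename recorded above keeps both spellings callable for now. A minimal sketch with placeholder values:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

async function listInstances(): Promise<void> {
  // New name added in this release:
  const { data } = await octokit.rest.codeScanning.listAlertInstances({
    owner: "octocat", // placeholder
    repo: "hello-world", // placeholder
    alert_number: 4, // placeholder
  });
  console.log(data.length);

  // The old name still resolves, but the `renamed: ["codeScanning", "listAlertInstances"]`
  // decoration makes the plugin log a deprecation warning pointing at the new name.
  await octokit.rest.codeScanning.listAlertsInstances({
    owner: "octocat",
    repo: "hello-world",
    alert_number: 4,
  });
}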
@@ -2176,8 +2215,8 @@ export declare type RestEndpointMethods = {
          * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint.
          *
          * There are two places where you can upload code scanning results.
-         * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/github/finding-security-vulnerabilities-and-errors-in-your-code/triaging-code-scanning-alerts-in-pull-requests)."
-         * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/github/finding-security-vulnerabilities-and-errors-in-your-code/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)."
+         * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)."
+         * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)."
          *
          * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example:
          *
@@ -4333,11 +4372,6 @@ export declare type RestEndpointMethods = {
                 url: string;
             }>;
         };
-        /**
-         * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by the `pull_request` key.
-         *
-         * Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
-         */
         createCard: {
             (params?: RestEndpointMethodTypes["projects"]["createCard"]["parameters"]): Promise;
             defaults: RequestInterface["defaults"];
@@ -4843,7 +4877,7 @@ export declare type RestEndpointMethods = {
        };
    };
    reactions: {
        /**
-         * Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this commit comment.
+         * Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment.
         */
        createForCommitComment: {
            (params?: RestEndpointMethodTypes["reactions"]["createForCommitComment"]["parameters"]): Promise;
@@ -4853,7 +4887,7 @@ export declare type RestEndpointMethods = {
            }>;
        };
        /**
-         * Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with a `Status: 200 OK` means that you already added the reaction type to this issue.
+         * Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue.
         */
        createForIssue: {
            (params?: RestEndpointMethodTypes["reactions"]["createForIssue"]["parameters"]): Promise;
@@ -4863,7 +4897,7 @@ export declare type RestEndpointMethods = {
            }>;
        };
        /**
-         * Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this issue comment.
+         * Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment.
         */
        createForIssueComment: {
            (params?: RestEndpointMethodTypes["reactions"]["createForIssueComment"]["parameters"]): Promise;
@@ -4873,7 +4907,7 @@ export declare type RestEndpointMethods = {
            }>;
        };
        /**
-         * Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this pull request review comment.
+         * Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment.
         */
        createForPullRequestReviewComment: {
            (params?: RestEndpointMethodTypes["reactions"]["createForPullRequestReviewComment"]["parameters"]): Promise;
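For illustration only, not part of the upstream patch: the uploadSarif description above asks for gzip-compressed, Base64-encoded SARIF. A minimal Node sketch with placeholder repository, branch, and commit values:

import { promises as fs } from "fs";
import { gzipSync } from "zlib";
import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

async function uploadResults(): Promise<void> {
  // Compress the SARIF file with gzip, then encode it as a Base64 string.
  const sarif = gzipSync(await fs.readFile("results.sarif")).toString("base64");
  await octokit.rest.codeScanning.uploadSarif({
    owner: "octocat", // placeholder
    repo: "hello-world", // placeholder
    ref: "refs/heads/my-branch", // placeholder
    commit_sha: "0123456789abcdef0123456789abcdef01234567", // placeholder
    sarif,
  });
}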
@@ -4883,7 +4917,17 @@ export declare type RestEndpointMethods = {
            }>;
        };
        /**
-         * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment.
+         * Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release.
+         */
+        createForRelease: {
+            (params?: RestEndpointMethodTypes["reactions"]["createForRelease"]["parameters"]): Promise;
+            defaults: RequestInterface["defaults"];
+            endpoint: EndpointInterface<{
+                url: string;
+            }>;
+        };
+        /**
+         * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment.
         *
         * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`.
         */
@@ -4895,7 +4939,7 @@ export declare type RestEndpointMethods = {
            }>;
        };
        /**
-         * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion.
+         * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion.
         *
         * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`.
         */
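For illustration only, not part of the upstream patch: a sketch of the new release reaction method typed above; as the description notes, a 200 response (rather than 201) means the reaction type had already been added. Placeholder values throughout:

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

async function reactToRelease(): Promise<void> {
  const { status } = await octokit.rest.reactions.createForRelease({
    owner: "octocat", // placeholder
    repo: "hello-world", // placeholder
    release_id: 123, // placeholder
    content: "heart",
  });
  console.log(status === 200 ? "already reacted" : "reaction created");
}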
@@ -5165,7 +5209,7 @@ export declare type RestEndpointMethods = {
            }>;
        };
        /**
-         * Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`.
+         * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`.
         *
         * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats.
         *
@@ -5213,6 +5257,55 @@ export declare type RestEndpointMethods = {
                url: string;
            }>;
        };
+        /**
+         * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`.
+         *
+         * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats.
+         *
+         * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file.
+         *
+         * **Working with large comparisons**
+         *
+         * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)."
+         *
+         * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest.
+         *
+         * **Signature verification object**
+         *
+         * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
+         *
+         * | Name | Type | Description |
+         * | ---- | ---- | ----------- |
+         * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
+         * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. |
+         * | `signature` | `string` | The signature that was extracted from the commit. |
+         * | `payload` | `string` | The value that was signed. |
+         *
+         * These are the possible values for `reason` in the `verification` object:
+         *
+         * | Value | Description |
+         * | ----- | ----------- |
+         * | `expired_key` | The key that made the signature is expired. |
+         * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
+         * | `gpgverify_error` | There was an error communicating with the signature verification service. |
+         * | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
+         * | `unsigned` | The object does not include a signature. |
+         * | `unknown_signature_type` | A non-PGP signature was found in the commit. |
+         * | `no_user` | No user was associated with the `committer` email address in the commit. |
+         * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. |
+         * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
+         * | `unknown_key` | The key that made the signature has not been registered with any user's account. |
+         * | `malformed_signature` | There was an error parsing the signature. |
+         * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
+         * | `valid` | None of the above errors applied, so the signature is considered to be verified. |
+         */
+        compareCommitsWithBasehead: {
+            (params?: RestEndpointMethodTypes["repos"]["compareCommitsWithBasehead"]["parameters"]): Promise;
+            defaults: RequestInterface["defaults"];
+            endpoint: EndpointInterface<{
+                url: string;
+            }>;
+        };
        /**
         * Create a comment for a commit using its `:commit_sha`.
         *
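For illustration only, not part of the upstream patch: a sketch of the new basehead-style comparison documented above, with placeholder repository and branch names. When paginating, only page 1 carries the full changed-file list.

import { Octokit } from "@octokit/core";
import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";

const octokit = new (Octokit.plugin(restEndpointMethods))({ auth: process.env.GITHUB_TOKEN });

async function compare(): Promise<void> {
  const { data } = await octokit.rest.repos.compareCommitsWithBasehead({
    owner: "octocat", // placeholder
    repo: "hello-world", // placeholder
    basehead: "main...my-feature-branch", // "{base}...{head}"
    per_page: 50,
    page: 1,
  });
  console.log(data.status, data.ahead_by, data.behind_by, data.files?.length);
}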
| + */ + compareCommitsWithBasehead: { + (params?: RestEndpointMethodTypes["repos"]["compareCommitsWithBasehead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; /** * Create a comment for a commit using its `:commit_sha`. * @@ -6494,8 +6587,8 @@ export declare type RestEndpointMethods = { /** * Lists all public repositories in the order that they were created. * - * Notes: - * - For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. */ listPublic: { diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts index 79ad137d..8ab8c5ac 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts @@ -5,6 +5,10 @@ export declare type RestEndpointMethodTypes = { parameters: RequestParameters & Omit; response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"]["response"]; }; + approveWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"]["response"]; + }; cancelWorkflowRun: { parameters: RequestParameters & Omit; response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"]["response"]; @@ -421,6 +425,10 @@ export declare type RestEndpointMethodTypes = { parameters: RequestParameters & Omit; response: Endpoints["POST /content_references/{content_reference_id}/attachments"]["response"]; }; + createContentAttachmentForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments"]["response"]; + }; createFromManifest: { parameters: RequestParameters & Omit; response: Endpoints["POST /app-manifests/{code}/conversions"]["response"]; @@ -635,6 +643,10 @@ export declare type RestEndpointMethodTypes = { parameters: RequestParameters & Omit; response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"]["response"]; }; + listAlertInstances: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; + }; listAlertsForRepo: { parameters: RequestParameters & Omit; response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["response"]; @@ -1667,6 +1679,10 @@ export declare type RestEndpointMethodTypes = { parameters: RequestParameters & Omit; response: Endpoints["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; }; + createForRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"]["response"]; + }; createForTeamDiscussionCommentInOrg: { parameters: RequestParameters & Omit; response: Endpoints["POST 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; @@ -1765,6 +1781,10 @@ export declare type RestEndpointMethodTypes = { parameters: RequestParameters & Omit; response: Endpoints["GET /repos/{owner}/{repo}/compare/{base}...{head}"]["response"]; }; + compareCommitsWithBasehead: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/compare/{basehead}"]["response"]; + }; createCommitComment: { parameters: RequestParameters & Omit; response: Endpoints["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts index 04ceedfc..c32c7ab0 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts @@ -1 +1 @@ -export declare const VERSION = "5.1.1"; +export declare const VERSION = "5.3.1"; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js index 2318ce61..b6faf670 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js @@ -3,6 +3,9 @@ const Endpoints = { addSelectedRepoToOrgSecret: [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve", + ], cancelWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel", ], @@ -223,6 +226,10 @@ const Endpoints = { "POST /content_references/{content_reference_id}/attachments", { mediaType: { previews: ["corsair"] } }, ], + createContentAttachmentForRepo: [ + "POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", + { mediaType: { previews: ["corsair"] } }, + ], createFromManifest: ["POST /app-manifests/{code}/conversions"], createInstallationAccessToken: [ "POST /app/installations/{installation_id}/access_tokens", @@ -321,9 +328,14 @@ const Endpoints = { "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", ], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + ], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], listAlertsInstances: [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] }, ], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], updateAlert: [ @@ -882,6 +894,10 @@ const Endpoints = { "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { mediaType: { previews: ["squirrel-girl"] } }, ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions", + { mediaType: { previews: ["squirrel-girl"] } }, + ], createForTeamDiscussionCommentInOrg: [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { mediaType: { previews: ["squirrel-girl"] } }, @@ -975,6 +991,9 @@ const Endpoints = { { mediaType: { previews: ["dorian"] } }, ], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}", + 
], createCommitComment: [ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments", ], @@ -1404,7 +1423,7 @@ const Endpoints = { }, }; -const VERSION = "5.1.1"; +const VERSION = "5.3.1"; function endpointsToMethods(octokit, endpointsMap) { const newMethods = {}; diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map index 799c06da..96c2a03d 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET 
/repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT 
/orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\n \"POST /content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: [\"corsair\"] } },\n ],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: 
[\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET 
/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\n \"GET /codes_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getConductCode: [\n \"GET /codes_of_conduct/{key}\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getForRepo: [\n \"GET /repos/{owner}/{repo}/community/code_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n 
getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n { mediaType: { previews: [\"mockingbird\"] } },\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET 
/repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getStatusForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForAuthenticatedUser: [\n \"GET /user/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n 
],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET 
/user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\n \"PUT /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createCard: [\n \"POST /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createColumn: [\n \"POST /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForAuthenticatedUser: [\n \"POST /user/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForOrg: [\n \"POST /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForRepo: [\n \"POST /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n delete: [\n \"DELETE /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteCard: [\n \"DELETE /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteColumn: [\n \"DELETE /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n get: [\n \"GET /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getCard: [\n \"GET /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getColumn: [\n \"GET /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCards: [\n \"GET /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCollaborators: [\n \"GET /projects/{project_id}/collaborators\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listColumns: [\n \"GET /projects/{project_id}/columns\",\n { 
mediaType: { previews: [\"inertia\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForRepo: [\n \"GET /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForUser: [\n \"GET /users/{username}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveCard: [\n \"POST /projects/columns/cards/{card_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveColumn: [\n \"POST /projects/columns/{column_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n update: [\n \"PATCH /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateCard: [\n \"PATCH /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateColumn: [\n \"PATCH /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n { mediaType: { previews: [\"lydian\"] } },\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n 
createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteLegacy: [\n \"DELETE /reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n {\n deprecated: \"octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy\",\n },\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { 
mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\n \"POST /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n { mediaType: { previews: [\"baptiste\"] } },\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\n \"DELETE /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n deletePullRequestReviewProtection: [\n \"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\n \"GET /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET 
/repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\n \"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\n \"PUT /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", { mediaType: { previews: [\"cloak\"] } }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", { mediaType: { 
previews: [\"mercy\"] } }],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n 
createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"5.1.1\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" 
parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nexport function legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n"],"names":["ENDPOINTS"],"mappings":"AAAA,MAAM,SAAS,GAAG;AAClB,IAAI,OAAO,EAAE;AACb,QAAQ,0BAA0B,EAAE;AACpC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,iDAAiD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,YAAY,EAAE,CAAC,+CAA+C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,+CAA+C;AAC3D,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,SAAS,EAAE,uCAAuC,CAAC,EAAE;AAC7E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,aAAa,EAAE,CAAC,yDAAyD,CAAC;AAClF,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,6CAA6C,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,cAAc,EAAE,CAAC,iDAAiD,CAAC;AAC3E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,6CAA6C,CAAC;AAC7E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,iCAAiC,CAAC;AAC3D,QAAQ,eAAe,EAAE,CAAC,2CAA2C,CAAC;AACtE,QAAQ,iBAAiB,EAAE,CAAC,6CAA6C,CAAC;AAC1E,QAAQ,4BAA4B,EAAE,
CAAC,2CAA2C,CAAC;AACnF,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,wDAAwD,EAAE;AAClE,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,iCAAiC,CAAC;AACxE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,wCAAwC,CAAC;AAC3E,QAAQ,aAAa,EAAE,CAAC,wDAAwD,CAAC;AACjF,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,kDAAkD;AAC9D,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,qCAAqC,EAAE,CAAC,kCAAkC,CAAC;AACnF,QAAQ,sBAAsB,EAAE,CAAC,2CAA2C,CAAC;AAC7E,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,YAAY,CAAC;AAChC,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,SAAS,EAAE,CAAC,wCAAwC,CAAC;AAC7D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE,CAAC,8BAA8B,CAAC;AACxE,QAAQ,qCAAqC,EAAE,CAAC,oBAAoB,CAAC;AACrE,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,aAAa,CAAC;AACzC,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,QAAQ,uBAAuB,EAAE,CAAC,qCAAqC,CAAC;AACxE,QAAQ,mBAAmB,EAAE,CAAC,wBAAwB,CAAC;AACvD,QAAQ,yBAAyB,EAAE,CAAC,uCAAuC,CAAC;AAC5E,QAAQ,+BAA+B,EAAE;AACzC,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,kCAAkC,CAAC;AAC5D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,mBAAmB,CAAC;AAClE,QAAQ,sBAAsB,EAAE,CAAC,+BAA+B,CAAC;AACjE,QAAQ,sBAAsB,EAAE,CAAC,qCAAqC,CAAC;AACvE,QAAQ,qBAAqB,EAAE,CAAC,sCAAsC,CAAC;AACvE,QAAQ,oCAAoC,EAAE,CAAC,yBAAyB,CAAC;AACzE,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,uBAAuB,EAAE,CAAC,oBAAoB,CAAC;AACvD,QAAQ,2BAA2B,EAAE,CAAC,yCAAyC,CAAC;AAChF,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,4BAA4B,EAAE,CAAC,kCAAkC,CAAC;AAC1E,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,uBAAuB,EAAE;AACjC,YAAY,6DAA6D;AACzE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,kBAAkB,EAAE,CAAC,6CAA6C,CAAC;AAC3E,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,UAAU,CAAC;AACtC,QAAQ,SAAS,EAAE,CAAC,sBAAsB,CAAC;AAC3C,QAAQ,eAAe,EAAE,CAAC,0CAA0C,CAAC;AACrE,QAAQ,kBAAkB,EAAE,CAAC,8BAA8B,CAAC;AAC5D,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oCAAoC,CAAC;AACnE,QAAQ,sBAAsB,EAAE,CAAC,sBAAsB,CAAC;AACxD,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,wBAAwB,CAAC;AACrD,QAAQ,qCAAqC,EAAE,CAAC,yBAAyB,CAAC;AAC1E,QAAQ,SAAS,EAAE,CAAC,gCAAgC,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,iCAAiC,EAAE,CAAC,gCAAgC,CAAC;AAC7E,QAAQ,qCAAqC,EAAE,CAAC,iCAAiC,CAAC;AAClF,QAAQ,4CAA4C,EAAE;AACtD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,uCAAuC,CAAC;AAC7D,QAAQ,6BAA6B,EAAE,CAAC,4BAA4B,CAAC;AACrE,QAAQ
,UAAU,EAAE,CAAC,6CAA6C,CAAC;AACnE,QAAQ,mBAAmB,EAAE,CAAC,oDAAoD,CAAC;AACnF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,wBAAwB,CAAC;AAC7D,KAAK;AACL,IAAI,OAAO,EAAE;AACb,QAAQ,0BAA0B,EAAE,CAAC,0CAA0C,CAAC;AAChF,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,2CAA2C,CAAC;AAClF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,uDAAuD;AACnE,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,MAAM,EAAE,CAAC,uCAAuC,CAAC;AACzD,QAAQ,WAAW,EAAE,CAAC,yCAAyC,CAAC;AAChE,QAAQ,GAAG,EAAE,CAAC,qDAAqD,CAAC;AACpE,QAAQ,QAAQ,EAAE,CAAC,yDAAyD,CAAC;AAC7E,QAAQ,eAAe,EAAE;AACzB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,oDAAoD,CAAC;AAC1E,QAAQ,YAAY,EAAE;AACtB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,cAAc,EAAE;AACxB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,uDAAuD,CAAC;AACzE,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,cAAc,EAAE;AACxB,YAAY,oFAAoF;AAChG,SAAS;AACT,QAAQ,QAAQ,EAAE;AAClB,YAAY,+DAA+D;AAC3E,YAAY,EAAE;AACd,YAAY,EAAE,iBAAiB,EAAE,EAAE,QAAQ,EAAE,cAAc,EAAE,EAAE;AAC/D,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,2DAA2D,CAAC;AAC/E,QAAQ,iBAAiB,EAAE,CAAC,gDAAgD,CAAC;AAC7E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,kDAAkD,CAAC;AAChF,QAAQ,WAAW,EAAE;AACrB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,KAAK;AACL,IAAI,cAAc,EAAE;AACpB,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,uBAAuB;AACnC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,6BAA6B;AACzC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,qDAAqD;AACjE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE,EAAE,GAAG,EAAE,CAAC,aAAa,CAAC,EAAE;AACpC,IAAI,eAAe,EAAE;AACrB,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,sDAAsD,EAAE;AAChE,YAAY,iEAAiE;AAC7E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,cAAc,EAAE,CAAC,2BAA2B,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,aAAa,CAAC;AAC/B,QAAQ,aAAa,EAAE,CAAC,gCAAgC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,yBAAyB,CAAC;AAC3C,QAAQ,aAAa,EAAE,CAAC,+CAA+C,CAAC;AACxE,QAAQ,IAAI,EAAE,CAAC,6BAA6B,CAAC;AAC7C,QAAQ,GAAG,EAAE,CAAC,sBAAsB,CAAC;AACrC,QAAQ,UAAU,EAAE,CAAC,4CAA4C,CAAC;AAClE,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,YAAY,EAAE,CAAC,+BAA+B,CAAC;AACvD,QAAQ,WAAW,EAAE,CAAC,8BAA8B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,4BAA4B,CAAC;AACjD,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,WAAW,EAAE,CAAC,oBAAoB,CAAC;AAC3C,QAAQ,IAAI,EAAE,CAAC,2BAA2B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,KAAK;AACL,IAAI,GAAG,EAAE;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,YAAY,EAAE,CAAC,wCAAwC,CAAC;AAChE,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,SAAS,EAAE,CAAC,oDAAoD,CAAC;AACzE,QAAQ,MAAM,EAAE,CAAC,yCAAyC,CAAC;AAC3D,QAAQ,MAAM,EAAE,CAAC,8CAA8C,CAAC;AAChE,QAAQ
,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,gBAAgB,EAAE,CAAC,mDAAmD,CAAC;AAC/E,QAAQ,SAAS,EAAE,CAAC,4CAA4C,CAAC;AACjE,KAAK;AACL,IAAI,SAAS,EAAE;AACf,QAAQ,eAAe,EAAE,CAAC,0BAA0B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,iCAAiC,CAAC;AACxD,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,QAAQ,sCAAsC,EAAE,CAAC,iCAAiC,CAAC;AACnF,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,iCAAiC;AAC7C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,wCAAwC,CAAC,EAAE;AACnF,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,YAAY,EAAE;AACtB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,yDAAyD,CAAC;AAC9E,QAAQ,sBAAsB,EAAE,CAAC,gDAAgD,CAAC;AAClF,QAAQ,MAAM,EAAE,CAAC,mCAAmC,CAAC;AACrD,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,4CAA4C,CAAC;AACnE,QAAQ,eAAe,EAAE;AACzB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,iDAAiD,CAAC;AAChE,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,QAAQ,EAAE,CAAC,oDAAoD,CAAC;AACxE,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,YAAY,EAAE,CAAC,yDAAyD,CAAC;AACjF,QAAQ,IAAI,EAAE,CAAC,aAAa,CAAC;AAC7B,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,YAAY,EAAE,CAAC,0DAA0D,CAAC;AAClF,QAAQ,mBAAmB,EAAE,CAAC,2CAA2C,CAAC;AAC1E,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,0DAA0D;AACtE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,aAAa,CAAC,EAAE,EAAE;AACxD,SAAS;AACT,QAAQ,wBAAwB,EAAE,CAAC,kBAAkB,CAAC;AACtD,QAAQ,UAAU,EAAE,CAAC,wBAAwB,CAAC;AAC9C,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kCAAkC,CAAC;AAC/D,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,sCAAsC,CAAC;AAChE,QAAQ,IAAI,EAAE,CAAC,sDAAsD,CAAC;AACtE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,wDAAwD,CAAC;AAC7E,QAAQ,MAAM,EAAE,CAAC,yDAAyD,CAAC;AAC3E,QAAQ,MAAM,EAAE,CAAC,mDAAmD,CAAC;AACrE,QAAQ,aAAa,EAAE,CAAC,0DAA0D,CAAC;AACnF,QAAQ,WAAW,EAAE,CAAC,2CAA2C,CAAC;AAClE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,GAAG,EAAE,CAAC,yBAAyB,CAAC;AACxC,QAAQ,kBAAkB,EAAE,CAAC,eAAe,CAAC;AAC7C,QAAQ,UAAU,EAAE,CAAC,mCAAmC,CAAC;AACzD,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,MAAM,EAAE,CAAC,gBAAgB,CAAC;AAClC,QAAQ,SAAS,EAAE;AACnB,YAAY,oBAAoB;AAChC,YAAY,EAAE,OAAO,EAAE,EAAE,cAAc,EAAE,2BAA2B,EAAE,EAAE;AACxE,SAAS;AACT,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC;AAC1B,QAAQ,UAAU,EAAE,CAAC,cAAc,CAAC;AACpC,QAAQ,MAAM,EAAE,CAAC,UAAU,CAAC;AAC5B,QAAQ,IAAI,EAAE,CAAC,OAAO,CAAC;AACvB,KAAK;AACL,IAAI,UAAU,EAAE;AAChB,QAAQ,YAAY,EAAE,CAAC,qCAAqC,CAAC;AAC7D,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sDAAsD;AAClE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mDAAmD;AAC/D,YAAY,EAAE,SAA
S,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,6CAA6C;AACzD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,eAAe,EAAE,CAAC,kCAAkC,CAAC;AAC7D,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,2CAA2C;AACvD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,sBAAsB;AAClC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,4BAA4B;AACxC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,wDAAwD;AACpE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kDAAkD;AAC9D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,wDAAwD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,yBAAyB,EAAE,CAAC,uBAAuB,CAAC;AAC5D,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,qEAAqE;AACjF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,gBAAgB,EAAE,CAAC,gDAAgD,CAAC;AAC5E,QAAQ,gBAAgB,EAAE,CAAC,mCAAmC,CAAC;AAC/D,QAAQ,sBAAsB,EAAE,CAAC,oCAAoC,CAAC;AACtE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,8BAA8B,CAAC;AAC1D,QAAQ,aAAa,EAAE,CAAC,wBAAwB,CAAC;AACjD,QAAQ,aAAa,EAAE,CAAC,oCAAoC,CAAC;AAC7D,QAAQ,GAAG,EAAE,CAAC,iBAAiB,CAAC;AAChC,QAAQ,iCAAiC,EAAE,CAAC,kCAAkC,CAAC;AAC/E,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iCAAiC,CAAC;AACvD,QAAQ,sBAAsB,EAAE,CAAC,wCAAwC,CAAC;AAC1E,QAAQ,IAAI,EAAE,CAAC,oBAAoB,CAAC;AACpC,QAAQ,oBAAoB,EAAE,CAAC,+BAA+B,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,wBAAwB,CAAC;AACpD,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,wBAAwB,EAAE,CAAC,gBAAgB,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,WAAW,EAAE,CAAC,yBAAyB,CAAC;AAChD,QAAQ,mCAAmC,EAAE,CAAC,4BAA4B,CAAC;AAC3E,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,6BAA6B,CAAC;AAC/D,QAAQ,iBAAiB,EAAE,CAAC,gCAAgC,CAAC;AAC7D,QAAQ,YAAY,EAAE,CAAC,uBAAuB,CAAC;AAC/C,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,YAAY,EAAE,CAAC,uCAAuC,CAAC;AAC/D,QAAQ,uBAAuB,EAAE,CAAC,2CAA2C,CAAC;AAC9E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,uCAAuC,EAAE;AACjD,YAAY,2CAA2C;AACvD,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,MAAM,EAAE,CAAC,mBAAmB,CAAC;AACrC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,oCAAoC;AAChD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mCAAmC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,wCAAwC,EAAE;AAClD,YAAY,mFAAmF;AAC/F,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,4CAA4C,EAAE;AACtD,YAAY,iEAAiE;AAC7E,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,UAAU,EAAE,2CAA2C,CAAC,EAAE;AAClF,SAAS;AACT,QAAQ,2DAA2D,EAAE;AACrE,YAAY,2DAA2D;AACvE,YAAY,EAAE;AACd,YAAY;AACZ,gBAAgB,OAAO,EAAE;AACzB,oBAAoB,UAAU;AAC9B,oBAAoB,yDAAyD;A
AC7E,iBAAiB;AACjB,aAAa;AACb,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,gFAAgF;AAC5F,SAAS;AACT,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+FAA+F;AAC3G,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,eAAe,EAAE;AACzB,YAAY,qDAAqD;AACjE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,qBAAqB;AACjC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2BAA2B;AACvC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,MAAM,EAAE;AAChB,YAAY,+BAA+B;AAC3C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,sCAAsC;AAClD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,GAAG,EAAE;AACb,YAAY,4BAA4B;AACxC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,OAAO,EAAE;AACjB,YAAY,uCAAuC;AACnD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,SAAS,EAAE;AACnB,YAAY,mCAAmC;AAC/C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,gEAAgE;AAC5E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,SAAS,EAAE;AACnB,YAAY,yCAAyC;AACrD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0BAA0B;AACtC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,gCAAgC;AAC5C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,QAAQ,EAAE;AAClB,YAAY,8CAA8C;AAC1D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,MAAM,EAAE;AAChB,YAAY,8BAA8B;AAC1C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,yCAAyC;AACrD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAA
E,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,aAAa,EAAE,CAAC,qDAAqD,CAAC;AAC9E,QAAQ,MAAM,EAAE,CAAC,kCAAkC,CAAC;AACpD,QAAQ,2BAA2B,EAAE;AACrC,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,wDAAwD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,+CAA+C,CAAC;AAC9D,QAAQ,SAAS,EAAE;AACnB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,uDAAuD,CAAC;AACnF,QAAQ,IAAI,EAAE,CAAC,iCAAiC,CAAC;AACjD,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,SAAS,EAAE,CAAC,qDAAqD,CAAC;AAC1E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,KAAK,EAAE,CAAC,qDAAqD,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,iDAAiD,CAAC;AACnE,QAAQ,YAAY,EAAE;AACtB,YAAY,6DAA6D;AACzE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,KAAK;AACL,IAAI,SAAS,EAAE,EAAE,GAAG,EAAE,CAAC,iBAAiB,CAAC,EAAE;AAC3C,IAAI,SAAS,EAAE;AACf,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4DAA4D;AACxE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4DAA4D;AACxE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mEAAmE;AAC/E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,kEAAkE;AAC9E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,wGAAwG;AACpH,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mFAAmF;AAC/F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,8FAA8F;AAC1G,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,wHAAwH;AACpI,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,iCAAiC;AAC7C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,YAAY;AACZ,gBAAgB,UAAU,EAAE,qIAAqI;AACjK,aAAa;AACb,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,2DAA2D;AACvE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2DAA2D;AACvE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,kEAAkE;AAC9E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,iEAAiE;AAC7E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,uGAAuG;AACnH,YAAY,EAAE,SAAS,EAAE,EAAE,Q
AAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,6EAA6E;AACzF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,gBAAgB,EAAE,CAAC,oDAAoD,CAAC;AAChF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,oDAAoD,CAAC;AAC/E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,yFAAyF;AACrG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,mDAAmD,CAAC;AAC7E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,6EAA6E;AACzF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,2CAA2C,CAAC;AACzE,QAAQ,eAAe,EAAE,CAAC,iCAAiC,CAAC;AAC5D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,sBAAsB,EAAE;AAChC,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,UAAU,EAAE,CAAC,kCAAkC,CAAC;AACxD,QAAQ,WAAW,EAAE,CAAC,wBAAwB,CAAC;AAC/C,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,2CAA2C,CAAC;AACjF,QAAQ,eAAe,EAAE;AACzB,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE,EAAE;AACvD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,uDAAuD;AACnE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,UAAU,CAAC,EAAE,EAAE;AACrD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,kCAAkC,CAAC;AAC3D,QAAQ,iBAAiB,EAAE,CAAC,qDAAqD,CAAC;AAClF,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oDAAoD,CAAC;AACnF,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,8CAA8C,CAAC;AACpE,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE,EAAE;AACvD,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,oDAAoD,CAAC;AAC7E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,uDAAuD;AACnE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,mDAAmD;AAC/D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,yCAAyC;AACrD,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,wBAAwB,CAAC,EAAE;AAC5D,SAAS;AACT,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,oDAAoD;AAChE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,2BAA2B,CAAC;AAC1C,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,yBAAyB,EAAE;AACnC,YA
AY,wFAAwF;AACpG,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,0CAA0C,CAAC;AAC/D,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAAC;AACjF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,gDAAgD,CAAC;AACnF,QAAQ,SAAS,EAAE,CAAC,yCAAyC,CAAC;AAC9D,QAAQ,sBAAsB,EAAE,CAAC,iDAAiD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,iDAAiD,CAAC;AAC7E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,6CAA6C,CAAC;AACnF,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,8CAA8C,CAAC;AAC9E,QAAQ,YAAY,EAAE,CAAC,yCAAyC,CAAC;AACjE,QAAQ,aAAa,EAAE,CAAC,uDAAuD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,+CAA+C,CAAC;AAC9E,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,QAAQ,EAAE,CAAC,iCAAiC,CAAC;AACrD,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,4CAA4C,CAAC;AACzE,QAAQ,SAAS,EAAE,CAAC,kCAAkC,CAAC;AACvD,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iDAAiD,CAAC;AACvE,QAAQ,eAAe,EAAE,CAAC,sDAAsD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,+CAA+C,CAAC;AAC1E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,QAAQ,eAAe,EAAE,CAAC,qDAAqD,CAAC;AAChF,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,uBAAuB,EAAE;AACjC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE;AACnC,YAAY,oEAAoE;AAChF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,oCAAoC,CAAC;AACzE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,cAAc,EAAE,CAAC,gCAAgC,CAAC;AAC1D,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,UAAU,EAAE,CAAC,uBAAuB,CAAC;AAC7C,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,mCAAmC,EAAE,CAAC,kCAAkC,CAAC;AACjF,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,eAAe,EAAE,CAAC,wCAAwC,CAAC;AACnE,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,sDAAsD;AAClE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,QAAQ,EAAE,CAAC,gCAAgC,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,YAAY,EAAE,CAAC,iCAAiC,CAAC;AACzD,QAAQ,KAAK,EAAE,CAAC,mCAAmC,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,kDAAkD,CAAC;AACzE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,6EAA6E;AACzF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2FAA2F;AACvG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,4BAA
4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,qDAAqD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,0EAA0E;AACtF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wFAAwF;AACpG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,QAAQ,EAAE,CAAC,qCAAqC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,6BAA6B,CAAC;AAC/C,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,+BAA+B,EAAE,CAAC,iCAAiC,CAAC;AAC5E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,wFAAwF;AACpG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iFAAiF;AAC7F,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,EAAE;AACjE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,iFAAiF;AAC7F,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,6CAA6C,CAAC;AACtE,QAAQ,0BAA0B,EAAE;AACpC,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,sEAAsE;AAClF,YAAY,EAAE,OAAO,EAAE,4BAA4B,EAAE;AACrD,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,IAAI,EAAE,CAAC,kBAAkB,CAAC;AAClC,QAAQ,OAAO,EAAE,CAAC,qBAAqB,EAAE,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC;AAChF,QAAQ,qBAAqB,EAAE,CAAC,oBAAoB,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,oBAAoB,CAAC;AACtC,QAAQ,KAAK,EAAE,CAAC,0BAA0B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,oBAAoB,EAAE,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC;AAC9E,QAAQ,KAAK,EAAE,CAAC,mBAAmB,CAAC;AACpC,KAAK;AACL,IAAI,cAAc,EAAE;AACpB,QAAQ,QAAQ,EAAE;AAClB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kDAAkD,CAAC;AAC/E,QAAQ,WAAW,EAAE;AACrB,YAAY,mEAAmE;AAC/E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,yDAAyD;AACrE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yDAAyD;AACrE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAAC;AACjF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,gGAAgG;AAC5G,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,yBAAyB,EAAE;AACnC,YAAY,6FAA6F;AACzG,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,IAAI,EAAE,CAAC,uBAAuB,CAAC;AACvC,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,+CAA+C,CAAC;AAC/E,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,4CAA4C;AACxD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6DAA6D;AACzE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,4DAA4D;AACxE,SAAS;AACT,
QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,+FAA+F;AAC3G,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,qCAAqC,CAAC;AAC5D,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,wBAAwB,EAAE,CAAC,mBAAmB,CAAC;AACvD,QAAQ,KAAK,EAAE,CAAC,6BAA6B,CAAC;AAC9C,QAAQ,YAAY,EAAE,CAAC,6BAA6B,CAAC;AACrD,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,oCAAoC,EAAE,CAAC,gCAAgC,CAAC;AAChF,QAAQ,4BAA4B,EAAE,CAAC,qBAAqB,CAAC;AAC7D,QAAQ,kCAAkC,EAAE,CAAC,iBAAiB,CAAC;AAC/D,QAAQ,2BAA2B,EAAE,CAAC,qBAAqB,CAAC;AAC5D,QAAQ,4BAA4B,EAAE,CAAC,oCAAoC,CAAC;AAC5E,QAAQ,kCAAkC,EAAE,CAAC,4BAA4B,CAAC;AAC1E,QAAQ,MAAM,EAAE,CAAC,gCAAgC,CAAC;AAClD,QAAQ,gBAAgB,EAAE,CAAC,WAAW,CAAC;AACvC,QAAQ,aAAa,EAAE,CAAC,uBAAuB,CAAC;AAChD,QAAQ,iBAAiB,EAAE,CAAC,iCAAiC,CAAC;AAC9D,QAAQ,yBAAyB,EAAE,CAAC,iCAAiC,CAAC;AACtE,QAAQ,+BAA+B,EAAE,CAAC,yBAAyB,CAAC;AACpE,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,2BAA2B,EAAE,CAAC,qBAAqB,CAAC;AAC5D,QAAQ,iCAAiC,EAAE,CAAC,qBAAqB,CAAC;AAClE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,2BAA2B,EAAE,CAAC,oBAAoB,CAAC;AAC3D,QAAQ,kBAAkB,EAAE,CAAC,gCAAgC,CAAC;AAC9D,QAAQ,gCAAgC,EAAE,CAAC,yBAAyB,CAAC;AACrE,QAAQ,qBAAqB,EAAE,CAAC,4BAA4B,CAAC;AAC7D,QAAQ,iCAAiC,EAAE,CAAC,gBAAgB,CAAC;AAC7D,QAAQ,yCAAyC,EAAE,CAAC,8BAA8B,CAAC;AACnF,QAAQ,OAAO,EAAE,CAAC,gCAAgC,CAAC;AACnD,QAAQ,QAAQ,EAAE,CAAC,mCAAmC,CAAC;AACvD,QAAQ,mBAAmB,EAAE,CAAC,aAAa,CAAC;AAC5C,KAAK;AACL,CAAC;;AC53CM,MAAM,OAAO,GAAG,mBAAmB,CAAC;;ACApC,SAAS,kBAAkB,CAAC,OAAO,EAAE,YAAY,EAAE;AAC1D,IAAI,MAAM,UAAU,GAAG,EAAE,CAAC;AAC1B,IAAI,KAAK,MAAM,CAAC,KAAK,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;AACnE,QAAQ,KAAK,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;AACxE,YAAY,MAAM,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC,GAAG,QAAQ,CAAC;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACnD,YAAY,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,EAAE,QAAQ,CAAC,CAAC;AAC9E,YAAY,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AACpC,gBAAgB,UAAU,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC;AACvC,aAAa;AACb,YAAY,MAAM,YAAY,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;AACnD,YAAY,IAAI,WAAW,EAAE;AAC7B,gBAAgB,YAAY,CAAC,UAAU,CAAC,GAAG,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,WAAW,CAAC,CAAC;AAC/G,gBAAgB,SAAS;AACzB,aAAa;AACb,YAAY,YAAY,CAAC,UAAU,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;AAClF,SAAS;AACT,KAAK;AACL,IAAI,OAAO,UAAU,CAAC;AACtB,CAAC;AACD,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,WAAW,EAAE;AACrE,IAAI,MAAM,mBAAmB,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACnE;AACA,IAAI,SAAS,eAAe,CAAC,GAAG,IAAI,EAAE;AACtC;AACA,QAAQ,IAAI,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AAClE;AACA,QAAQ,IAAI,WAAW,CAAC,SAAS,EAAE;AACnC,YAAY,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE;AACjD,gBAAgB,IAAI,EAAE,OAAO,CAAC,WAAW,CAAC,SAAS,CAAC;AACpD,gBAAgB,CAAC,WAAW,CAAC,SAAS,GAAG,SAAS;AAClD,aAAa,CAAC,CAAC;AACf,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,CAAC;AAChD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,OAAO,EAAE;AACjC,YAAY,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC,GAAG,WAAW,CAAC,OAAO,CAAC;AAClE,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5H,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,UAAU,EAAE;AACpC,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AACrD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,iBAAiB,EAAE;AAC3C;AACA,YAAY,MAAM,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AACxE,YAAY,KAAK,MAAM,CAAC,IAAI,EAAE,KAAK,CAA
C,IAAI,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,iBAAiB,CAAC,EAAE;AACvF,gBAAgB,IAAI,IAAI,IAAI,OAAO,EAAE;AACrC,oBAAoB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,uCAAuC,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACzI,oBAAoB,IAAI,EAAE,KAAK,IAAI,OAAO,CAAC,EAAE;AAC7C,wBAAwB,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;AACvD,qBAAqB;AACrB,oBAAoB,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC;AACzC,iBAAiB;AACjB,aAAa;AACb,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,CAAC;AAChD,SAAS;AACT;AACA,QAAQ,OAAO,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5C,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC;AAC/D,CAAC;;ACxDM,SAAS,mBAAmB,CAAC,OAAO,EAAE;AAC7C,IAAI,MAAM,GAAG,GAAG,kBAAkB,CAAC,OAAO,EAAEA,SAAS,CAAC,CAAC;AACvD,IAAI,OAAO;AACX,QAAQ,IAAI,EAAE,GAAG;AACjB,KAAK,CAAC;AACN,CAAC;AACD,mBAAmB,CAAC,OAAO,GAAG,OAAO,CAAC;AACtC,AAAO,SAAS,yBAAyB,CAAC,OAAO,EAAE;AACnD,IAAI,MAAM,GAAG,GAAG,kBAAkB,CAAC,OAAO,EAAEA,SAAS,CAAC,CAAC;AACvD,IAAI,OAAO;AACX,QAAQ,GAAG,GAAG;AACd,QAAQ,IAAI,EAAE,GAAG;AACjB,KAAK,CAAC;AACN,CAAC;AACD,yBAAyB,CAAC,OAAO,GAAG,OAAO,CAAC;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/generated/endpoints.js","../dist-src/version.js","../dist-src/endpoints-to-methods.js","../dist-src/index.js"],"sourcesContent":["const Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\",\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\",\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\",\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\",\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\",\n ],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\",\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\",\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\",\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\",\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\",\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\",\n ],\n 
downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\",\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\",\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\",\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\",\n ],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\",\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\",\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\",\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\",\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] },\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\",\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\",\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\",\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\",\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\",\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\",\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\",\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\",\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\",\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\",\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\",\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\",\n ],\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\",\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\",\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\",\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\",\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\",\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\",\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"],\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n 
],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\n \"POST /content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: [\"corsair\"] } },\n ],\n createContentAttachmentForRepo: [\n \"POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments\",\n { mediaType: { previews: [\"corsair\"] } },\n ],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\",\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\",\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\",\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\",\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\",\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\",\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"],\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\",\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\",\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\",\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\",\n ],\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET 
/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\",\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\",\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } },\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\",\n ],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] },\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"],\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\n \"GET /codes_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getConductCode: [\n \"GET /codes_of_conduct/{key}\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n getForRepo: [\n \"GET /repos/{owner}/{repo}/community/code_of_conduct\",\n { mediaType: { previews: [\"scarlet-witch\"] } },\n ],\n },\n emojis: { get: [\"GET /emojis\"] },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\n \"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n enableSelectedOrganizationGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\",\n ],\n getAllowedActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n getGithubActionsPermissionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions\",\n ],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"GET /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n setAllowedActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\",\n ],\n setGithubActionsPermissionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions\",\n ],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\n \"PUT /enterprises/{enterprise}/actions/permissions/organizations\",\n ],\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n 
listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"],\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"],\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"],\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] },\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\",\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] },\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] },\n ],\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\",\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: 
[\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n { mediaType: { previews: [\"mockingbird\"] } },\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\",\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\",\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\",\n ],\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"],\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } },\n ],\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"],\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n getStatusForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForAuthenticatedUser: [\n 
\"GET /user/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/migrations\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\",\n { mediaType: { previews: [\"wyandotte\"] } },\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"],\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\",\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\",\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\",\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\",\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\",\n ],\n 
updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"],\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\",\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] },\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\",\n ],\n },\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\",\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\",\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\",\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\",\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\",\n ],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\",\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\",\n ],\n },\n projects: {\n addCollaborator: [\n \"PUT /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createCard: [\n \"POST /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createColumn: [\n \"POST /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForAuthenticatedUser: [\n \"POST /user/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForOrg: [\n \"POST /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n createForRepo: [\n \"POST /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n delete: [\n \"DELETE /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n deleteCard: [\n \"DELETE /projects/columns/cards/{card_id}\",\n { mediaType: { 
previews: [\"inertia\"] } },\n ],\n deleteColumn: [\n \"DELETE /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n get: [\n \"GET /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getCard: [\n \"GET /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getColumn: [\n \"GET /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCards: [\n \"GET /projects/columns/{column_id}/cards\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listCollaborators: [\n \"GET /projects/{project_id}/collaborators\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listColumns: [\n \"GET /projects/{project_id}/columns\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForOrg: [\n \"GET /orgs/{org}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForRepo: [\n \"GET /repos/{owner}/{repo}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n listForUser: [\n \"GET /users/{username}/projects\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveCard: [\n \"POST /projects/columns/cards/{card_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n moveColumn: [\n \"POST /projects/columns/{column_id}/moves\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n update: [\n \"PATCH /projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateCard: [\n \"PATCH /projects/columns/cards/{card_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n updateColumn: [\n \"PATCH /projects/columns/{column_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\",\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\",\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\",\n { mediaType: { previews: [\"lydian\"] } },\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\",\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\",\n ],\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n deleteLegacy: [\n \"DELETE /reactions/{reaction_id}\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n {\n deprecated: \"octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy\",\n },\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n 
listForIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n { mediaType: { previews: [\"squirrel-girl\"] } },\n ],\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\",\n ],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\n \"POST /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\",\n { mediaType: { previews: [\"baptiste\"] } },\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\",\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n deletePagesSite: [\n \"DELETE /repos/{owner}/{repo}/pages\",\n { mediaType: { previews: [\"switcheroo\"] } },\n ],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] },\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\",\n { mediaType: { previews: [\"london\"] } },\n ],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\",\n { mediaType: { previews: [\"dorian\"] } },\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\",\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n ],\n getAllTopics: [\n \"GET /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n ],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\",\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: 
[\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\",\n { mediaType: { previews: [\"zzzax\"] } },\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\",\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\",\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET 
/users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n { mediaType: { previews: [\"groot\"] } },\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\",\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\n \"PUT /repos/{owner}/{repo}/topics\",\n { mediaType: { previews: [\"mercy\"] } },\n ],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\",\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" },\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" },\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" },\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" },\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\",\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\",\n ],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\",\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n 
\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\",\n ],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] },\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\",\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" },\n ],\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", { mediaType: { previews: [\"cloak\"] } }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", { mediaType: { previews: [\"mercy\"] } }],\n users: [\"GET /search/users\"],\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\",\n ],\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n { mediaType: { previews: [\"inertia\"] } },\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n ],\n listProjectsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n { mediaType: { previews: [\"inertia\"] } 
},\n ],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\",\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\",\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\",\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\",\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\",\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"],\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"],\n },\n};\nexport default Endpoints;\n","export const VERSION = \"5.3.1\";\n","export function endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({ method, url }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n 
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined,\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n","import ENDPOINTS from \"./generated/endpoints\";\nimport { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nexport function restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n rest: api,\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nexport function legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, ENDPOINTS);\n return {\n ...api,\n rest: api,\n };\n}\nlegacyRestEndpointMethods.VERSION = 
VERSION;\n"],"names":["ENDPOINTS"],"mappings":"AAAA,MAAM,SAAS,GAAG;AAClB,IAAI,OAAO,EAAE;AACb,QAAQ,0BAA0B,EAAE;AACpC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,+CAA+C,CAAC;AAClF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,uBAAuB,EAAE;AACjC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,iDAAiD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,YAAY,EAAE,CAAC,+CAA+C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,+CAA+C;AAC3D,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,SAAS,EAAE,uCAAuC,CAAC,EAAE;AAC7E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,aAAa,EAAE,CAAC,yDAAyD,CAAC;AAClF,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,6CAA6C,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,2DAA2D,CAAC;AAClF,QAAQ,cAAc,EAAE,CAAC,iDAAiD,CAAC;AAC3E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,6CAA6C,CAAC;AAC7E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,iCAAiC,CAAC;AAC3D,QAAQ,eAAe,EAAE,CAAC,2CAA2C,CAAC;AACtE,QAAQ,iBAAiB,EAAE,CAAC,6CAA6C,CAAC;AAC1E,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,wDAAwD,EAAE;AAClE,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,iCAAiC,CAAC;AACxE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,wCAAwC,CAAC;AAC3E,QAAQ,aAAa,EAAE,CAAC,wDAAwD,CAAC;AACjF,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,6BAA6B,EAAE;AACvC,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,uCAAuC,EAAE;AACjD,YAAY,qCAAqC;AACjD,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,kDAAkD;AAC9D,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,q
CAAqC,EAAE,CAAC,kCAAkC,CAAC;AACnF,QAAQ,sBAAsB,EAAE,CAAC,2CAA2C,CAAC;AAC7E,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,YAAY,CAAC;AAChC,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,SAAS,EAAE,CAAC,wCAAwC,CAAC;AAC7D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,8BAA8B,EAAE,CAAC,8BAA8B,CAAC;AACxE,QAAQ,qCAAqC,EAAE,CAAC,oBAAoB,CAAC;AACrE,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,aAAa,CAAC;AACzC,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,QAAQ,uBAAuB,EAAE,CAAC,qCAAqC,CAAC;AACxE,QAAQ,mBAAmB,EAAE,CAAC,wBAAwB,CAAC;AACvD,QAAQ,yBAAyB,EAAE,CAAC,uCAAuC,CAAC;AAC5E,QAAQ,+BAA+B,EAAE;AACzC,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,kCAAkC,CAAC;AAC5D,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,mBAAmB,CAAC;AAClE,QAAQ,sBAAsB,EAAE,CAAC,+BAA+B,CAAC;AACjE,QAAQ,sBAAsB,EAAE,CAAC,qCAAqC,CAAC;AACvE,QAAQ,qBAAqB,EAAE,CAAC,sCAAsC,CAAC;AACvE,QAAQ,oCAAoC,EAAE,CAAC,yBAAyB,CAAC;AACzE,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,uBAAuB,EAAE,CAAC,oBAAoB,CAAC;AACvD,QAAQ,2BAA2B,EAAE,CAAC,yCAAyC,CAAC;AAChF,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,4BAA4B,EAAE,CAAC,kCAAkC,CAAC;AAC1E,QAAQ,8BAA8B,EAAE,CAAC,qCAAqC,CAAC;AAC/E,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,uBAAuB,EAAE;AACjC,YAAY,6DAA6D;AACzE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,kFAAkF;AAC9F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,kBAAkB,EAAE,CAAC,6CAA6C,CAAC;AAC3E,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,UAAU,CAAC;AACtC,QAAQ,SAAS,EAAE,CAAC,sBAAsB,CAAC;AAC3C,QAAQ,eAAe,EAAE,CAAC,0CAA0C,CAAC;AACrE,QAAQ,kBAAkB,EAAE,CAAC,8BAA8B,CAAC;AAC5D,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oCAAoC,CAAC;AACnE,QAAQ,sBAAsB,EAAE,CAAC,sBAAsB,CAAC;AACxD,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,wBAAwB,CAAC;AACrD,QAAQ,qCAAqC,EAAE,CAAC,yBAAyB,CAAC;AAC1E,QAAQ,SAAS,EAAE,CAAC,gCAAgC,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,iCAAiC,EAAE,CAAC,gCAAgC,CAAC;AAC7E,QAAQ,qCAAqC,EAAE,CAAC,iCAAiC,CAAC;AAClF,QAAQ,4CAA4C,EAAE;AACtD,YAAY,yCAAyC;AACrD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,uCAAuC,CAAC;AAC7D,QAAQ,6BAA6B,EAAE,CAAC,4BAA4B,CAAC;AACrE,QAAQ,UAAU,EAAE,CAAC,6CAA6C,CAAC;AACnE,QAAQ,mBAAmB,EAAE,CAAC,oDAAoD,CAAC;AACnF,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,wBAAwB,CAAC;AAC7D,KAAK;AACL,IAAI,OAAO,EAAE;AACb,QAAQ,0BAA0B,EAAE,CAAC,0CAA0C,CAAC;AAChF,QAAQ,2BAA2B,EAAE;AACrC,YAAY,gDAAgD;AAC5D,SAAS;AACT,QAAQ,2BAA2B,EAAE,CAAC,2CAA2C,CAAC;AAClF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,uDAAuD;AACnE,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,MAAM,EAAE,CAAC,uCAAuC,CAAC;AACzD,QAAQ,WAAW,EAAE,CAAC,yCAAyC,CAAC;AAChE,QAAQ,GAAG,EAAE,CAAC,qDAAqD,CAAC;AACpE,QAAQ,QAAQ,EAAE,CAAC,yDAAyD,CAAC;AAC7E,QAAQ,eAAe,EAAE;AACzB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,oDAAoD,CAAC;AAC1E,QAAQ,YAAY,EAAE;
AACtB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,sDAAsD,CAAC;AAClF,QAAQ,cAAc,EAAE;AACxB,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,sDAAsD;AAClE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,uDAAuD,CAAC;AACzE,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,cAAc,EAAE;AACxB,YAAY,oFAAoF;AAChG,SAAS;AACT,QAAQ,QAAQ,EAAE;AAClB,YAAY,+DAA+D;AAC3E,YAAY,EAAE;AACd,YAAY,EAAE,iBAAiB,EAAE,EAAE,QAAQ,EAAE,cAAc,EAAE,EAAE;AAC/D,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,2DAA2D,CAAC;AAC/E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,gDAAgD,CAAC;AAC7E,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yEAAyE;AACrF,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,oBAAoB,CAAC,EAAE;AAC/D,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,kDAAkD,CAAC;AAChF,QAAQ,WAAW,EAAE;AACrB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,KAAK;AACL,IAAI,cAAc,EAAE;AACpB,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,uBAAuB;AACnC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,6BAA6B;AACzC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,qDAAqD;AACjE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE,EAAE,GAAG,EAAE,CAAC,aAAa,CAAC,EAAE;AACpC,IAAI,eAAe,EAAE;AACrB,QAAQ,kDAAkD,EAAE;AAC5D,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,iDAAiD,EAAE;AAC3D,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,mDAAmD;AAC/D,SAAS;AACT,QAAQ,sDAAsD,EAAE;AAChE,YAAY,iEAAiE;AAC7E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,cAAc,EAAE,CAAC,2BAA2B,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,aAAa,CAAC;AAC/B,QAAQ,aAAa,EAAE,CAAC,gCAAgC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,yBAAyB,CAAC;AAC3C,QAAQ,aAAa,EAAE,CAAC,+CAA+C,CAAC;AACxE,QAAQ,IAAI,EAAE,CAAC,6BAA6B,CAAC;AAC7C,QAAQ,GAAG,EAAE,CAAC,sBAAsB,CAAC;AACrC,QAAQ,UAAU,EAAE,CAAC,4CAA4C,CAAC;AAClE,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,YAAY,EAAE,CAAC,+BAA+B,CAAC;AACvD,QAAQ,WAAW,EAAE,CAAC,8BAA8B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,4BAA4B,CAAC;AACjD,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,WAAW,EAAE,CAAC,oBAAoB,CAAC;AAC3C,QAAQ,IAAI,EAAE,CAAC,2BAA2B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,KAAK;AACL,IAAI,GAAG,EAAE;AACT,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,YAAY,EAAE,CAAC,wCAAwC,CAAC;AAChE,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,SAAS,EAAE,CAAC,qCAAqC,CAAC;AAC1D,QAAQ,UAAU,EAAE,CAAC,sCAAsC,CAAC;AAC5D,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,SAAS,EAAE,CAAC,oDAAoD,CAAC;AACzE,QAAQ,MAAM,EAAE,CAAC,yCAAyC,CAAC;AAC3D,QAAQ,MAAM,EAAE,CAAC,8CAA8C,CAAC;AAChE,QAAQ,OAAO,EAAE,CAAC,gDAAgD,CAAC;AACnE,QAAQ,gBAAgB,EAAE,CAAC,mDAAmD,CAAC;AAC/E,QAAQ,SAAS,EAAE,CAAC,4CAA4C,CAAC;AACjE,KAAK;AACL,IAAI,SAAS,EAAE;AACf,QAAQ,eAAe,EAAE,CAAC,0BAA0B,CAAC;AACrD,QAAQ,WAAW,EAAE,CAAC,iCAAiC,CAAC;AACxD,KAAK;AACL,IAAI,YAAY,EAAE;AAClB,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,QAAQ,sCAAsC,EAAE,CAAC,iCAAiC,CAAC;AACnF,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,iDAAiD;AAC7D,SAAS;AACT,QAAQ,oCAAoC,EAA
E;AAC9C,YAAY,iCAAiC;AAC7C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,wCAAwC,CAAC,EAAE;AACnF,SAAS;AACT,QAAQ,mCAAmC,EAAE,CAAC,8BAA8B,CAAC;AAC7E,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,sBAAsB,EAAE,CAAC,8CAA8C,CAAC;AAChF,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,8BAA8B;AAC1C,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,cAAc,EAAE,qCAAqC,CAAC,EAAE;AAChF,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,YAAY,EAAE;AACtB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,yDAAyD,CAAC;AAC9E,QAAQ,sBAAsB,EAAE,CAAC,gDAAgD,CAAC;AAClF,QAAQ,MAAM,EAAE,CAAC,mCAAmC,CAAC;AACrD,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,aAAa,EAAE;AACvB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,4CAA4C,CAAC;AACnE,QAAQ,eAAe,EAAE;AACzB,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,iDAAiD,CAAC;AAChE,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,QAAQ,EAAE,CAAC,oDAAoD,CAAC;AACxE,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,YAAY,EAAE,CAAC,yDAAyD,CAAC;AACjF,QAAQ,IAAI,EAAE,CAAC,aAAa,CAAC;AAC7B,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,YAAY,EAAE,CAAC,0DAA0D,CAAC;AAClF,QAAQ,mBAAmB,EAAE,CAAC,2CAA2C,CAAC;AAC1E,QAAQ,UAAU,EAAE,CAAC,wDAAwD,CAAC;AAC9E,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,0DAA0D;AACtE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,aAAa,CAAC,EAAE,EAAE;AACxD,SAAS;AACT,QAAQ,wBAAwB,EAAE,CAAC,kBAAkB,CAAC;AACtD,QAAQ,UAAU,EAAE,CAAC,wBAAwB,CAAC;AAC9C,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kCAAkC,CAAC;AAC/D,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,sCAAsC,CAAC;AAChE,QAAQ,IAAI,EAAE,CAAC,sDAAsD,CAAC;AACtE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,kEAAkE;AAC9E,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,wDAAwD,CAAC;AAC7E,QAAQ,MAAM,EAAE,CAAC,yDAAyD,CAAC;AAC3E,QAAQ,MAAM,EAAE,CAAC,mDAAmD,CAAC;AACrE,QAAQ,aAAa,EAAE,CAAC,0DAA0D,CAAC;AACnF,QAAQ,WAAW,EAAE,CAAC,2CAA2C,CAAC;AAClE,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,GAAG,EAAE,CAAC,yBAAyB,CAAC;AACxC,QAAQ,kBAAkB,EAAE,CAAC,eAAe,CAAC;AAC7C,QAAQ,UAAU,EAAE,CAAC,mCAAmC,CAAC;AACzD,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,MAAM,EAAE,CAAC,gBAAgB,CAAC;AAClC,QAAQ,SAAS,EAAE;AACnB,YAAY,oBAAoB;AAChC,YAAY,EAAE,OAAO,EAAE,EAAE,cAAc,EAAE,2BAA2B,EAAE,EAAE;AACxE,SAAS;AACT,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC;AAC1B,QAAQ,UAAU,EAAE,CAAC,cAAc,CAAC;AACpC,QAAQ,MAAM,EAAE,CAAC,UAAU,CAAC;AAC5B,QAAQ,IAAI,EAAE,CAAC,OAAO,CAAC;AACvB,KAAK;AACL,IAAI,UAAU,EAAE;AAChB,QAAQ,YAAY,EAAE,CAAC,qCAAqC,CAAC;AAC7D,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sDAAsD;AAClE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mDAAmD;AAC/D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,6CAA6C;AACzD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,0CAA0C,CAAC;AACtE,QAAQ,eAAe,EAAE,CAAC,kCAAkC,CAAC;AAC7D,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,2CAA2C;AACvD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,sBAAsB;AAClC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS
;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,4BAA4B;AACxC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,wDAAwD;AACpE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kDAAkD;AAC9D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,wDAAwD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,yBAAyB,EAAE,CAAC,uBAAuB,CAAC;AAC5D,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,kCAAkC,CAAC;AACzD,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,qEAAqE;AACjF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,WAAW,CAAC,EAAE,EAAE;AACtD,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,KAAK;AACL,IAAI,IAAI,EAAE;AACV,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,gBAAgB,EAAE,CAAC,gDAAgD,CAAC;AAC5E,QAAQ,gBAAgB,EAAE,CAAC,mCAAmC,CAAC;AAC/D,QAAQ,sBAAsB,EAAE,CAAC,oCAAoC,CAAC;AACtE,QAAQ,4BAA4B,EAAE,CAAC,2CAA2C,CAAC;AACnF,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,8BAA8B,CAAC;AAC1D,QAAQ,aAAa,EAAE,CAAC,wBAAwB,CAAC;AACjD,QAAQ,aAAa,EAAE,CAAC,oCAAoC,CAAC;AAC7D,QAAQ,GAAG,EAAE,CAAC,iBAAiB,CAAC;AAChC,QAAQ,iCAAiC,EAAE,CAAC,kCAAkC,CAAC;AAC/E,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iCAAiC,CAAC;AACvD,QAAQ,sBAAsB,EAAE,CAAC,wCAAwC,CAAC;AAC1E,QAAQ,IAAI,EAAE,CAAC,oBAAoB,CAAC;AACpC,QAAQ,oBAAoB,EAAE,CAAC,+BAA+B,CAAC;AAC/D,QAAQ,gBAAgB,EAAE,CAAC,wBAAwB,CAAC;AACpD,QAAQ,qBAAqB,EAAE,CAAC,oCAAoC,CAAC;AACrE,QAAQ,wBAAwB,EAAE,CAAC,gBAAgB,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,4BAA4B,CAAC;AACnD,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,WAAW,EAAE,CAAC,yBAAyB,CAAC;AAChD,QAAQ,mCAAmC,EAAE,CAAC,4BAA4B,CAAC;AAC3E,QAAQ,wBAAwB,EAAE,CAAC,uCAAuC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,6BAA6B,CAAC;AAC/D,QAAQ,iBAAiB,EAAE,CAAC,gCAAgC,CAAC;AAC7D,QAAQ,YAAY,EAAE,CAAC,uBAAuB,CAAC;AAC/C,QAAQ,WAAW,EAAE,CAAC,wCAAwC,CAAC;AAC/D,QAAQ,YAAY,EAAE,CAAC,uCAAuC,CAAC;AAC/D,QAAQ,uBAAuB,EAAE,CAAC,2CAA2C,CAAC;AAC9E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,uCAAuC,EAAE;AACjD,YAAY,2CAA2C;AACvD,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,MAAM,EAAE,CAAC,mBAAmB,CAAC;AACrC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,oCAAoC;AAChD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mCAAmC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,qDAAqD;AACjE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,wCAAwC,EAAE;AAClD,YAAY,mFAAmF;AAC/F,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,4CAA4C,EAAE;AACtD,YAAY,iEAAiE;AAC7E,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,UAAU,EAAE,2CAA2C,CAAC,EAAE;AAClF,SAAS;AACT,QAAQ,2DAA2D,EAAE;AACrE,YAAY,2DAA2D;AACvE,YAAY,EAAE;AACd,YAAY;AACZ,gBAAgB,OAAO,EAAE;AACzB,oBAAoB,UAAU;AAC9B,oBAAoB,yDAAyD;AAC7E,iBAAiB;AACjB,aAAa;AACb,SAAS;AACT,QAAQ,uDAAuD,EAAE;AACjE,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,0CAA0C,EAAE;AACpD,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,qCAAqC,EAAE;AAC/C,YAAY,gFAAgF;AAC5F,SAAS;AACT,QAAQ,gCAAgC,EAAE;AAC1C,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,4FAA4F;AACxG,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,yEAAyE;AACrF,SAAS;AACT,QAAQ,yCAAyC,EAAE;AACnD,YAAY,yFAAyF;AACrG
,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+FAA+F;AAC3G,SAAS;AACT,KAAK;AACL,IAAI,QAAQ,EAAE;AACd,QAAQ,eAAe,EAAE;AACzB,YAAY,qDAAqD;AACjE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,qBAAqB;AACjC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2BAA2B;AACvC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,MAAM,EAAE;AAChB,YAAY,+BAA+B;AAC3C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,sCAAsC;AAClD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,GAAG,EAAE;AACb,YAAY,4BAA4B;AACxC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,OAAO,EAAE;AACjB,YAAY,uCAAuC;AACnD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,SAAS,EAAE;AACnB,YAAY,mCAAmC;AAC/C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,gEAAgE;AAC5E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,SAAS,EAAE;AACnB,YAAY,yCAAyC;AACrD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0BAA0B;AACtC,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,WAAW,EAAE;AACrB,YAAY,gCAAgC;AAC5C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,QAAQ,EAAE;AAClB,YAAY,8CAA8C;AAC1D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,0CAA0C;AACtD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,MAAM,EAAE;AAChB,YAAY,8BAA8B;AAC1C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,UAAU,EAAE;AACpB,YAAY,yCAAyC;AACrD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,qCAAqC;AACjD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,aAAa,EAAE,CAAC,qDAAqD,CAAC;AAC9E,QAAQ,MAAM,EAAE,CAAC,kCAAkC,CAAC;AACpD,QAAQ,2BAA2B,EAAE;AACrC,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,wDAAwD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,aAAa,EAAE;AACvB,YAAY,8EAA8E;AAC1F,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,+CAA+C,CAAC;AAC9D,QAAQ,SAAS,EAAE;AACnB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,gBAAgB,EAAE,CAAC,uDAAuD,CAAC;AACnF,QAAQ,IAAI,EAAE,CAAC,iCAAiC,CAAC;AACjD,QAAQ,qBAAqB,EAAE;AA
C/B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,SAAS,EAAE,CAAC,qDAAqD,CAAC;AAC1E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,0CAA0C,CAAC;AAC/E,QAAQ,WAAW,EAAE,CAAC,uDAAuD,CAAC;AAC9E,QAAQ,KAAK,EAAE,CAAC,qDAAqD,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,oEAAoE;AAChF,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,iDAAiD,CAAC;AACnE,QAAQ,YAAY,EAAE;AACtB,YAAY,6DAA6D;AACzE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,yDAAyD;AACrE,SAAS;AACT,KAAK;AACL,IAAI,SAAS,EAAE,EAAE,GAAG,EAAE,CAAC,iBAAiB,CAAC,EAAE;AAC3C,IAAI,SAAS,EAAE;AACf,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4DAA4D;AACxE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4DAA4D;AACxE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mEAAmE;AAC/E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,kEAAkE;AAC9E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,4DAA4D;AACxE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,wGAAwG;AACpH,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,mFAAmF;AAC/F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,uBAAuB,EAAE;AACjC,YAAY,8FAA8F;AAC1G,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,8BAA8B,EAAE;AACxC,YAAY,wHAAwH;AACpI,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,iCAAiC;AAC7C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,YAAY;AACZ,gBAAgB,UAAU,EAAE,qIAAqI;AACjK,aAAa;AACb,SAAS;AACT,QAAQ,oBAAoB,EAAE;AAC9B,YAAY,2DAA2D;AACvE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,2DAA2D;AACvE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,kEAAkE;AAC9E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,iEAAiE;AAC7E,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,uGAAuG;AACnH,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,6EAA6E;AACzF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,eAAe,CAAC,EAAE,EAAE;AAC1D,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,gBAAgB,EAAE,CAAC,oDAAoD,CAAC;AAChF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,oDAAoD,CAAC;AAC/E,QAAQ,sBAAsB,EAAE;AAChC,YAAY,yFAAyF;AACrG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YA
AY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,4EAA4E;AACxF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,oDAAoD,CAAC;AACjF,QAAQ,wBAAwB,EAAE;AAClC,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,mDAAmD,CAAC;AAC7E,QAAQ,0BAA0B,EAAE;AACpC,YAAY,8CAA8C;AAC1D,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,6EAA6E;AACzF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,2CAA2C,CAAC;AACzE,QAAQ,eAAe,EAAE,CAAC,iCAAiC,CAAC;AAC5D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,sBAAsB,EAAE;AAChC,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,uCAAuC,CAAC;AACtE,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,UAAU,EAAE,CAAC,kCAAkC,CAAC;AACxD,QAAQ,WAAW,EAAE,CAAC,wBAAwB,CAAC;AAC/C,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,2CAA2C,CAAC;AACjF,QAAQ,eAAe,EAAE;AACzB,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE,EAAE;AACvD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,uDAAuD;AACnE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,UAAU,CAAC,EAAE,EAAE;AACrD,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,kCAAkC,CAAC;AAC3D,QAAQ,iBAAiB,EAAE,CAAC,qDAAqD,CAAC;AAClF,QAAQ,MAAM,EAAE,CAAC,8BAA8B,CAAC;AAChD,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,8DAA8D;AAC1E,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,oDAAoD,CAAC;AACnF,QAAQ,+BAA+B,EAAE;AACzC,YAAY,+EAA+E;AAC3F,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,4CAA4C,CAAC;AACvE,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,UAAU,EAAE,CAAC,8CAA8C,CAAC;AACpE,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,oCAAoC;AAChD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE,EAAE;AACvD,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,yFAAyF;AACrG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,oDAAoD,CAAC;AAC7E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,8CAA8C,CAAC;AACvE,QAAQ,6BAA6B,EAAE;AACvC,YAAY,uDAAuD;AACnE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,mDAAmD;AAC/D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,yCAAyC;AACrD,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,wBAAwB,CAAC,EAAE;AAC5D,SAAS;AACT,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,sBAAsB,EAAE,CAAC,yCAAyC,CAAC;AAC3E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,oDAAoD;AAChE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,gDAAgD;AAC5D,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE;AACnD,SAAS;AACT,QAAQ,GAAG,EAAE,CAAC,2BAA2B,CAAC;AAC1C,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,uEAAuE;AACnF,SAAS;AACT,QAAQ,kBAAkB,EAAE,CAAC,wCAAwC,CAAC;AACtE,QAAQ,yBAAyB,EAAE;AACnC,YAAY,wFAAwF;AACpG,SAAS;AACT,QAAQ,YAAY,EAAE;AACtB,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,0EAA0E;AACtF,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,6CAA6C,CAAC;AAClE,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,SAAS,EAAE,CAAC,0CAA0C,CAAC;AAC/D,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAAC;AACjF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,+DAA+D;AAC3E,SAAS;AACT,QAAQ,uBAAuB,EAAE,CAAC,gDAAgD,CAAC;AACnF,QAAQ,SAAS,EAAE,CAAC,y
CAAyC,CAAC;AAC9D,QAAQ,sBAAsB,EAAE,CAAC,iDAAiD,CAAC;AACnF,QAAQ,gBAAgB,EAAE,CAAC,iDAAiD,CAAC;AAC7E,QAAQ,4BAA4B,EAAE;AACtC,YAAY,4EAA4E;AACxF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,0BAA0B,EAAE,CAAC,6CAA6C,CAAC;AACnF,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,8CAA8C,CAAC;AAC9E,QAAQ,YAAY,EAAE,CAAC,yCAAyC,CAAC;AACjE,QAAQ,aAAa,EAAE,CAAC,uDAAuD,CAAC;AAChF,QAAQ,mBAAmB,EAAE;AAC7B,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,cAAc,EAAE;AACxB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,+CAA+C,CAAC;AAC9E,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,QAAQ,EAAE,CAAC,iCAAiC,CAAC;AACrD,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,mBAAmB,EAAE,CAAC,wCAAwC,CAAC;AACvE,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,8BAA8B,EAAE;AACxC,YAAY,sFAAsF;AAClG,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,4CAA4C,CAAC;AACzE,QAAQ,SAAS,EAAE,CAAC,kCAAkC,CAAC;AACvD,QAAQ,oBAAoB,EAAE,CAAC,wCAAwC,CAAC;AACxE,QAAQ,UAAU,EAAE,CAAC,iDAAiD,CAAC;AACvE,QAAQ,eAAe,EAAE,CAAC,sDAAsD,CAAC;AACjF,QAAQ,eAAe,EAAE,CAAC,+CAA+C,CAAC;AAC1E,QAAQ,yBAAyB,EAAE;AACnC,YAAY,+EAA+E;AAC3F,SAAS;AACT,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,iDAAiD,CAAC;AACxE,QAAQ,eAAe,EAAE,CAAC,qDAAqD,CAAC;AAChF,QAAQ,mCAAmC,EAAE;AAC7C,YAAY,2EAA2E;AACvF,SAAS;AACT,QAAQ,QAAQ,EAAE,CAAC,yCAAyC,CAAC;AAC7D,QAAQ,UAAU,EAAE,CAAC,2CAA2C,CAAC;AACjE,QAAQ,uBAAuB,EAAE;AACjC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,yBAAyB,EAAE;AACnC,YAAY,oEAAoE;AAChF,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,yBAAyB,EAAE,CAAC,oCAAoC,CAAC;AACzE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,kDAAkD;AAC9D,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,mCAAmC,CAAC;AAC1D,QAAQ,gBAAgB,EAAE,CAAC,wCAAwC,CAAC;AACpE,QAAQ,cAAc,EAAE,CAAC,gCAAgC,CAAC;AAC1D,QAAQ,sBAAsB,EAAE;AAChC,YAAY,gEAAgE;AAC5E,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,UAAU,EAAE,CAAC,uBAAuB,CAAC;AAC7C,QAAQ,WAAW,EAAE,CAAC,6BAA6B,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,eAAe,EAAE,CAAC,uCAAuC,CAAC;AAClE,QAAQ,mCAAmC,EAAE,CAAC,kCAAkC,CAAC;AACjF,QAAQ,aAAa,EAAE,CAAC,qCAAqC,CAAC;AAC9D,QAAQ,eAAe,EAAE,CAAC,wCAAwC,CAAC;AACnE,QAAQ,UAAU,EAAE,CAAC,mBAAmB,CAAC;AACzC,QAAQ,oCAAoC,EAAE;AAC9C,YAAY,sDAAsD;AAClE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,oCAAoC,CAAC;AAC5D,QAAQ,QAAQ,EAAE,CAAC,gCAAgC,CAAC;AACpD,QAAQ,SAAS,EAAE,CAAC,iCAAiC,CAAC;AACtD,QAAQ,YAAY,EAAE,CAAC,iCAAiC,CAAC;AACzD,QAAQ,KAAK,EAAE,CAAC,mCAAmC,CAAC;AACpD,QAAQ,WAAW,EAAE,CAAC,kDAAkD,CAAC;AACzE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,6EAA6E;AACzF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,uDAAuD;AACnE,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2FAA2F;AACvG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,kFAAkF;AAC9F,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,8EAA8E;AAC1F,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,YAAY,EAAE,CAAC,qDAAqD,CAAC;AAC7E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,kCAAkC;AAC9C,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE;AAClD,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,yCAAyC,CAAC;AACtE,QAAQ,wBAAwB,EAAE;AAClC,YAAY,wEAAwE;AACpF,SAAS;AACT,QAAQ,wBAAwB,EAAE;AAClC,YAAY,0EAA0E;AACtF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,MAAM,EAAE;AACjC,SAAS;AACT,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wF
AAwF;AACpG,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE;AACrC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,2EAA2E;AACvF,YAAY,EAAE;AACd,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE;AAClC,SAAS;AACT,QAAQ,eAAe,EAAE,CAAC,kDAAkD,CAAC;AAC7E,QAAQ,QAAQ,EAAE,CAAC,qCAAqC,CAAC;AACzD,QAAQ,MAAM,EAAE,CAAC,6BAA6B,CAAC;AAC/C,QAAQ,sBAAsB,EAAE;AAChC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,mBAAmB,EAAE,CAAC,mDAAmD,CAAC;AAClF,QAAQ,+BAA+B,EAAE,CAAC,iCAAiC,CAAC;AAC5E,QAAQ,gBAAgB,EAAE;AAC1B,YAAY,yDAAyD;AACrE,SAAS;AACT,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,wFAAwF;AACpG,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,mDAAmD,CAAC;AAC5E,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,0BAA0B,EAAE;AACpC,YAAY,iFAAiF;AAC7F,YAAY,EAAE;AACd,YAAY,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,EAAE;AACjE,SAAS;AACT,QAAQ,2BAA2B,EAAE;AACrC,YAAY,iFAAiF;AAC7F,SAAS;AACT,QAAQ,aAAa,EAAE,CAAC,6CAA6C,CAAC;AACtE,QAAQ,0BAA0B,EAAE;AACpC,YAAY,oDAAoD;AAChE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,sEAAsE;AAClF,YAAY,EAAE,OAAO,EAAE,4BAA4B,EAAE;AACrD,SAAS;AACT,KAAK;AACL,IAAI,MAAM,EAAE;AACZ,QAAQ,IAAI,EAAE,CAAC,kBAAkB,CAAC;AAClC,QAAQ,OAAO,EAAE,CAAC,qBAAqB,EAAE,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC;AAChF,QAAQ,qBAAqB,EAAE,CAAC,oBAAoB,CAAC;AACrD,QAAQ,MAAM,EAAE,CAAC,oBAAoB,CAAC;AACtC,QAAQ,KAAK,EAAE,CAAC,0BAA0B,CAAC;AAC3C,QAAQ,MAAM,EAAE,CAAC,oBAAoB,EAAE,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC;AAC9E,QAAQ,KAAK,EAAE,CAAC,mBAAmB,CAAC;AACpC,KAAK;AACL,IAAI,cAAc,EAAE;AACpB,QAAQ,QAAQ,EAAE;AAClB,YAAY,iEAAiE;AAC7E,SAAS;AACT,QAAQ,iBAAiB,EAAE,CAAC,kDAAkD,CAAC;AAC/E,QAAQ,WAAW,EAAE;AACrB,YAAY,mEAAmE;AAC/E,SAAS;AACT,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,iCAAiC,EAAE;AAC3C,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,kCAAkC,EAAE;AAC5C,YAAY,yDAAyD;AACrE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,+BAA+B,EAAE;AACzC,YAAY,yDAAyD;AACrE,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,wDAAwD;AACpE,SAAS;AACT,QAAQ,MAAM,EAAE,CAAC,wBAAwB,CAAC;AAC1C,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6EAA6E;AACzF,SAAS;AACT,QAAQ,qBAAqB,EAAE,CAAC,gDAAgD,CAAC;AACjF,QAAQ,4BAA4B,EAAE;AACtC,YAAY,gGAAgG;AAC5G,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,sEAAsE;AAClF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,sCAAsC,CAAC;AAC7D,QAAQ,SAAS,EAAE,CAAC,mCAAmC,CAAC;AACxD,QAAQ,yBAAyB,EAAE;AACnC,YAAY,6FAA6F;AACzG,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,mEAAmE;AAC/E,SAAS;AACT,QAAQ,yBAAyB,EAAE;AACnC,YAAY,0DAA0D;AACtE,SAAS;AACT,QAAQ,IAAI,EAAE,CAAC,uBAAuB,CAAC;AACvC,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,4EAA4E;AACxF,SAAS;AACT,QAAQ,oBAAoB,EAAE,CAAC,+CAA+C,CAAC;AAC/E,QAAQ,wBAAwB,EAAE,CAAC,iBAAiB,CAAC;AACrD,QAAQ,gBAAgB,EAAE,CAAC,2CAA2C,CAAC;AACvE,QAAQ,2BAA2B,EAAE;AACrC,YAAY,+CAA+C;AAC3D,SAAS;AACT,QAAQ,iBAAiB,EAAE;AAC3B,YAAY,4CAA4C;AACxD,YAAY,EAAE,SAAS,EAAE,EAAE,QAAQ,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE;AACpD,SAAS;AACT,QAAQ,cAAc,EAAE,CAAC,yCAAyC,CAAC;AACnE,QAAQ,4BAA4B,EAAE;AACtC,YAAY,6DAA6D;AACzE,SAAS;AACT,QAAQ,kBAAkB,EAAE;AAC5B,YAAY,4DAA4D;AACxE,SAAS;AACT,QAAQ,eAAe,EAAE;AACzB,YAAY,2DAA2D;AACvE,SAAS;AACT,QAAQ,4BAA4B,EAAE;AACtC,YAAY,+FAA+F;AAC3G,SAAS;AACT,QAAQ,qBAAqB,EAAE;AAC/B,YAAY,qEAAqE;AACjF,SAAS;AACT,QAAQ,WAAW,EAAE,CAAC,qCAAqC,CAAC;AAC5D,KAAK;AACL,IAAI,KAAK,EAAE;AACX,QAAQ,wBAAwB,EAAE,CAAC,mBAAmB,CAAC;AACvD,QAAQ,KAAK,EAAE,CAAC,6BAA6B,CAAC;AAC9C,QAAQ,YAAY,EAAE,CAAC,6BAA6B,CAAC;AACrD,QAAQ,qBAAqB,EAAE,CAAC,+CAA+C,CAAC;AAChF,QAAQ,oCAAoC,EAAE,CAAC,gCAAgC,CAAC;AAChF,QAAQ,4BAA4B,EAAE,CAAC,qBAAqB,CAAC;AAC7D,QAAQ,kCAAkC,EAAE
,CAAC,iBAAiB,CAAC;AAC/D,QAAQ,2BAA2B,EAAE,CAAC,qBAAqB,CAAC;AAC5D,QAAQ,4BAA4B,EAAE,CAAC,oCAAoC,CAAC;AAC5E,QAAQ,kCAAkC,EAAE,CAAC,4BAA4B,CAAC;AAC1E,QAAQ,MAAM,EAAE,CAAC,gCAAgC,CAAC;AAClD,QAAQ,gBAAgB,EAAE,CAAC,WAAW,CAAC;AACvC,QAAQ,aAAa,EAAE,CAAC,uBAAuB,CAAC;AAChD,QAAQ,iBAAiB,EAAE,CAAC,iCAAiC,CAAC;AAC9D,QAAQ,yBAAyB,EAAE,CAAC,iCAAiC,CAAC;AACtE,QAAQ,+BAA+B,EAAE,CAAC,yBAAyB,CAAC;AACpE,QAAQ,IAAI,EAAE,CAAC,YAAY,CAAC;AAC5B,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,0BAA0B,EAAE,CAAC,kBAAkB,CAAC;AACxD,QAAQ,2BAA2B,EAAE,CAAC,qBAAqB,CAAC;AAC5D,QAAQ,iCAAiC,EAAE,CAAC,qBAAqB,CAAC;AAClE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,oBAAoB,EAAE,CAAC,iCAAiC,CAAC;AACjE,QAAQ,2BAA2B,EAAE,CAAC,oBAAoB,CAAC;AAC3D,QAAQ,kBAAkB,EAAE,CAAC,gCAAgC,CAAC;AAC9D,QAAQ,gCAAgC,EAAE,CAAC,yBAAyB,CAAC;AACrE,QAAQ,qBAAqB,EAAE,CAAC,4BAA4B,CAAC;AAC7D,QAAQ,iCAAiC,EAAE,CAAC,gBAAgB,CAAC;AAC7D,QAAQ,yCAAyC,EAAE,CAAC,8BAA8B,CAAC;AACnF,QAAQ,OAAO,EAAE,CAAC,gCAAgC,CAAC;AACnD,QAAQ,QAAQ,EAAE,CAAC,mCAAmC,CAAC;AACvD,QAAQ,mBAAmB,EAAE,CAAC,aAAa,CAAC;AAC5C,KAAK;AACL,CAAC;;AC/4CM,MAAM,OAAO,GAAG,mBAAmB,CAAC;;ACApC,SAAS,kBAAkB,CAAC,OAAO,EAAE,YAAY,EAAE;AAC1D,IAAI,MAAM,UAAU,GAAG,EAAE,CAAC;AAC1B,IAAI,KAAK,MAAM,CAAC,KAAK,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;AACnE,QAAQ,KAAK,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;AACxE,YAAY,MAAM,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC,GAAG,QAAQ,CAAC;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACnD,YAAY,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,EAAE,QAAQ,CAAC,CAAC;AAC9E,YAAY,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AACpC,gBAAgB,UAAU,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC;AACvC,aAAa;AACb,YAAY,MAAM,YAAY,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;AACnD,YAAY,IAAI,WAAW,EAAE;AAC7B,gBAAgB,YAAY,CAAC,UAAU,CAAC,GAAG,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,gBAAgB,EAAE,WAAW,CAAC,CAAC;AAC/G,gBAAgB,SAAS;AACzB,aAAa;AACb,YAAY,YAAY,CAAC,UAAU,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;AAClF,SAAS;AACT,KAAK;AACL,IAAI,OAAO,UAAU,CAAC;AACtB,CAAC;AACD,SAAS,QAAQ,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,WAAW,EAAE;AACrE,IAAI,MAAM,mBAAmB,GAAG,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACnE;AACA,IAAI,SAAS,eAAe,CAAC,GAAG,IAAI,EAAE;AACtC;AACA,QAAQ,IAAI,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AAClE;AACA,QAAQ,IAAI,WAAW,CAAC,SAAS,EAAE;AACnC,YAAY,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE;AACjD,gBAAgB,IAAI,EAAE,OAAO,CAAC,WAAW,CAAC,SAAS,CAAC;AACpD,gBAAgB,CAAC,WAAW,CAAC,SAAS,GAAG,SAAS;AAClD,aAAa,CAAC,CAAC;AACf,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,CAAC;AAChD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,OAAO,EAAE;AACjC,YAAY,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC,GAAG,WAAW,CAAC,OAAO,CAAC;AAClE,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5H,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,UAAU,EAAE;AACpC,YAAY,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AACrD,SAAS;AACT,QAAQ,IAAI,WAAW,CAAC,iBAAiB,EAAE;AAC3C;AACA,YAAY,MAAM,OAAO,GAAG,mBAAmB,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;AACxE,YAAY,KAAK,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,iBAAiB,CAAC,EAAE;AACvF,gBAAgB,IAAI,IAAI,IAAI,OAAO,EAAE;AACrC,oBAAoB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,uCAAuC,EAAE,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACzI,oBAAoB,IAAI,EAAE,KAAK,IAAI,OAAO,CAAC,EAAE;AAC7C,wBAAwB,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;AACvD,qBAAqB;AACrB,oBAAoB,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC;AACzC,iBAAiB;AACjB,aAAa;AACb,YAAY,OAAO,mBAAmB,CAAC,OAAO,CAAC,C
AAC;AAChD,SAAS;AACT;AACA,QAAQ,OAAO,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5C,KAAK;AACL,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC;AAC/D,CAAC;;ACxDM,SAAS,mBAAmB,CAAC,OAAO,EAAE;AAC7C,IAAI,MAAM,GAAG,GAAG,kBAAkB,CAAC,OAAO,EAAEA,SAAS,CAAC,CAAC;AACvD,IAAI,OAAO;AACX,QAAQ,IAAI,EAAE,GAAG;AACjB,KAAK,CAAC;AACN,CAAC;AACD,mBAAmB,CAAC,OAAO,GAAG,OAAO,CAAC;AACtC,AAAO,SAAS,yBAAyB,CAAC,OAAO,EAAE;AACnD,IAAI,MAAM,GAAG,GAAG,kBAAkB,CAAC,OAAO,EAAEA,SAAS,CAAC,CAAC;AACvD,IAAI,OAAO;AACX,QAAQ,GAAG,GAAG;AACd,QAAQ,IAAI,EAAE,GAAG;AACjB,KAAK,CAAC;AACN,CAAC;AACD,yBAAyB,CAAC,OAAO,GAAG,OAAO,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/plugin-rest-endpoint-methods/package.json b/node_modules/@octokit/plugin-rest-endpoint-methods/package.json index 13604b98..cd04c604 100644 --- a/node_modules/@octokit/plugin-rest-endpoint-methods/package.json +++ b/node_modules/@octokit/plugin-rest-endpoint-methods/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/plugin-rest-endpoint-methods", "description": "Octokit plugin adding one method for all of api.github.com REST API endpoints", - "version": "5.1.1", + "version": "5.3.1", "license": "MIT", "files": [ "dist-*/", @@ -17,7 +17,7 @@ ], "repository": "github:octokit/plugin-rest-endpoint-methods.js", "dependencies": { - "@octokit/types": "^6.14.1", + "@octokit/types": "^6.16.2", "deprecation": "^2.3.1" }, "peerDependencies": { @@ -35,8 +35,8 @@ "@types/jest": "^26.0.0", "@types/node": "^14.0.4", "fetch-mock": "^9.0.0", - "fs-extra": "^9.0.0", - "jest": "^26.1.0", + "fs-extra": "^10.0.0", + "jest": "^27.0.0", "lodash.camelcase": "^4.3.0", "lodash.set": "^4.3.2", "lodash.upperfirst": "^4.3.1", @@ -47,7 +47,7 @@ "semantic-release-plugin-update-version-in-files": "^1.0.0", "sort-keys": "^4.2.0", "string-to-jsdoc-comment": "^1.0.0", - "ts-jest": "^26.1.3", + "ts-jest": "^27.0.0-next.12", "typescript": "^4.0.2" }, "publishConfig": { diff --git a/node_modules/@octokit/request-error/README.md b/node_modules/@octokit/request-error/README.md index c939cdae..1bf53843 100644 --- a/node_modules/@octokit/request-error/README.md +++ b/node_modules/@octokit/request-error/README.md @@ -55,11 +55,11 @@ const error = new RequestError("Oops", 500, { error.message; // Oops error.status; // 500 -error.headers; // { 'x-github-request-id': '1:2:3:4' } error.request.method; // POST error.request.url; // https://api.github.com/foo error.request.body; // { bar: 'baz' } error.request.headers; // { authorization: 'token [REDACTED]' } +error.response; // { url, status, headers, data } ``` ## LICENSE diff --git a/node_modules/@octokit/request-error/dist-node/index.js b/node_modules/@octokit/request-error/dist-node/index.js index 95b9c579..619f462b 100644 --- a/node_modules/@octokit/request-error/dist-node/index.js +++ b/node_modules/@octokit/request-error/dist-node/index.js @@ -7,7 +7,8 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau var deprecation = require('deprecation'); var once = _interopDefault(require('once')); -const logOnce = once(deprecation => console.warn(deprecation)); +const logOnceCode = once(deprecation => console.warn(deprecation)); +const logOnceHeaders = once(deprecation => console.warn(deprecation)); /** * Error with extra properties to help with debugging */ @@ -24,14 +25,17 @@ class RequestError extends Error { this.name = "HttpError"; this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use 
`error.status`.")); - return statusCode; - } + let headers; + + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options const requestCopy = Object.assign({}, options.request); @@ -46,7 +50,22 @@ class RequestError extends Error { .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; + this.request = requestCopy; // deprecations + + Object.defineProperty(this, "code", { + get() { + logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + } + + }); } } diff --git a/node_modules/@octokit/request-error/dist-node/index.js.map b/node_modules/@octokit/request-error/dist-node/index.js.map index f5d527e8..9134ddb4 100644 --- a/node_modules/@octokit/request-error/dist-node/index.js.map +++ b/node_modules/@octokit/request-error/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnce = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n this.headers = options.headers || {};\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n 
}\n}\n"],"names":["logOnce","once","deprecation","console","warn","RequestError","Error","constructor","message","statusCode","options","captureStackTrace","name","status","Object","defineProperty","get","Deprecation","headers","requestCopy","assign","request","authorization","replace","url"],"mappings":";;;;;;;;;AAEA,MAAMA,OAAO,GAAGC,IAAI,CAAEC,WAAD,IAAiBC,OAAO,CAACC,IAAR,CAAaF,WAAb,CAAlB,CAApB;AACA;AACA;AACA;;AACO,MAAMG,YAAN,SAA2BC,KAA3B,CAAiC;AACpCC,EAAAA,WAAW,CAACC,OAAD,EAAUC,UAAV,EAAsBC,OAAtB,EAA+B;AACtC,UAAMF,OAAN,EADsC;;AAGtC;;AACA,QAAIF,KAAK,CAACK,iBAAV,EAA6B;AACzBL,MAAAA,KAAK,CAACK,iBAAN,CAAwB,IAAxB,EAA8B,KAAKJ,WAAnC;AACH;;AACD,SAAKK,IAAL,GAAY,WAAZ;AACA,SAAKC,MAAL,GAAcJ,UAAd;AACAK,IAAAA,MAAM,CAACC,cAAP,CAAsB,IAAtB,EAA4B,MAA5B,EAAoC;AAChCC,MAAAA,GAAG,GAAG;AACFhB,QAAAA,OAAO,CAAC,IAAIiB,uBAAJ,CAAgB,0EAAhB,CAAD,CAAP;AACA,eAAOR,UAAP;AACH;;AAJ+B,KAApC;AAMA,SAAKS,OAAL,GAAeR,OAAO,CAACQ,OAAR,IAAmB,EAAlC,CAfsC;;AAiBtC,UAAMC,WAAW,GAAGL,MAAM,CAACM,MAAP,CAAc,EAAd,EAAkBV,OAAO,CAACW,OAA1B,CAApB;;AACA,QAAIX,OAAO,CAACW,OAAR,CAAgBH,OAAhB,CAAwBI,aAA5B,EAA2C;AACvCH,MAAAA,WAAW,CAACD,OAAZ,GAAsBJ,MAAM,CAACM,MAAP,CAAc,EAAd,EAAkBV,OAAO,CAACW,OAAR,CAAgBH,OAAlC,EAA2C;AAC7DI,QAAAA,aAAa,EAAEZ,OAAO,CAACW,OAAR,CAAgBH,OAAhB,CAAwBI,aAAxB,CAAsCC,OAAtC,CAA8C,MAA9C,EAAsD,aAAtD;AAD8C,OAA3C,CAAtB;AAGH;;AACDJ,IAAAA,WAAW,CAACK,GAAZ,GAAkBL,WAAW,CAACK,GAAZ;AAEd;AAFc,KAGbD,OAHa,CAGL,sBAHK,EAGmB,0BAHnB;AAKd;AALc,KAMbA,OANa,CAML,qBANK,EAMkB,yBANlB,CAAlB;AAOA,SAAKF,OAAL,GAAeF,WAAf;AACH;;AAhCmC;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return 
statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n },\n });\n }\n}\n"],"names":["logOnceCode","once","deprecation","console","warn","logOnceHeaders","RequestError","Error","constructor","message","statusCode","options","captureStackTrace","name","status","headers","response","requestCopy","Object","assign","request","authorization","replace","url","defineProperty","get","Deprecation"],"mappings":";;;;;;;;;AAEA,MAAMA,WAAW,GAAGC,IAAI,CAAEC,WAAD,IAAiBC,OAAO,CAACC,IAAR,CAAaF,WAAb,CAAlB,CAAxB;AACA,MAAMG,cAAc,GAAGJ,IAAI,CAAEC,WAAD,IAAiBC,OAAO,CAACC,IAAR,CAAaF,WAAb,CAAlB,CAA3B;AACA;AACA;AACA;;AACO,MAAMI,YAAN,SAA2BC,KAA3B,CAAiC;AACpCC,EAAAA,WAAW,CAACC,OAAD,EAAUC,UAAV,EAAsBC,OAAtB,EAA+B;AACtC,UAAMF,OAAN,EADsC;;AAGtC;;AACA,QAAIF,KAAK,CAACK,iBAAV,EAA6B;AACzBL,MAAAA,KAAK,CAACK,iBAAN,CAAwB,IAAxB,EAA8B,KAAKJ,WAAnC;AACH;;AACD,SAAKK,IAAL,GAAY,WAAZ;AACA,SAAKC,MAAL,GAAcJ,UAAd;AACA,QAAIK,OAAJ;;AACA,QAAI,aAAaJ,OAAb,IAAwB,OAAOA,OAAO,CAACI,OAAf,KAA2B,WAAvD,EAAoE;AAChEA,MAAAA,OAAO,GAAGJ,OAAO,CAACI,OAAlB;AACH;;AACD,QAAI,cAAcJ,OAAlB,EAA2B;AACvB,WAAKK,QAAL,GAAgBL,OAAO,CAACK,QAAxB;AACAD,MAAAA,OAAO,GAAGJ,OAAO,CAACK,QAAR,CAAiBD,OAA3B;AACH,KAhBqC;;;AAkBtC,UAAME,WAAW,GAAGC,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBR,OAAO,CAACS,OAA1B,CAApB;;AACA,QAAIT,OAAO,CAACS,OAAR,CAAgBL,OAAhB,CAAwBM,aAA5B,EAA2C;AACvCJ,MAAAA,WAAW,CAACF,OAAZ,GAAsBG,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBR,OAAO,CAACS,OAAR,CAAgBL,OAAlC,EAA2C;AAC7DM,QAAAA,aAAa,EAAEV,OAAO,CAACS,OAAR,CAAgBL,OAAhB,CAAwBM,aAAxB,CAAsCC,OAAtC,CAA8C,MAA9C,EAAsD,aAAtD;AAD8C,OAA3C,CAAtB;AAGH;;AACDL,IAAAA,WAAW,CAACM,GAAZ,GAAkBN,WAAW,CAACM,GAAZ;AAEd;AAFc,KAGbD,OAHa,CAGL,sBAHK,EAGmB,0BAHnB;AAKd;AALc,KAMbA,OANa,CAML,qBANK,EAMkB,yBANlB,CAAlB;AAOA,SAAKF,OAAL,GAAeH,WAAf,CA/BsC;;AAiCtCC,IAAAA,MAAM,CAACM,cAAP,CAAsB,IAAtB,EAA4B,MAA5B,EAAoC;AAChCC,MAAAA,GAAG,GAAG;AACFzB,QAAAA,WAAW,CAAC,IAAI0B,uBAAJ,CAAgB,0EAAhB,CAAD,CAAX;AACA,eAAOhB,UAAP;AACH;;AAJ+B,KAApC;AAMAQ,IAAAA,MAAM,CAACM,cAAP,CAAsB,IAAtB,EAA4B,SAA5B,EAAuC;AACnCC,MAAAA,GAAG,GAAG;AACFpB,QAAAA,cAAc,CAAC,IAAIqB,uBAAJ,CAAgB,uFAAhB,CAAD,CAAd;AACA,eAAOX,OAAO,IAAI,EAAlB;AACH;;AAJkC,KAAvC;AAMH;;AA9CmC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request-error/dist-src/index.js b/node_modules/@octokit/request-error/dist-src/index.js index c880b450..5eb19276 100644 --- a/node_modules/@octokit/request-error/dist-src/index.js +++ b/node_modules/@octokit/request-error/dist-src/index.js @@ -1,6 +1,7 @@ import { Deprecation } from "deprecation"; import once from "once"; -const logOnce = once((deprecation) => console.warn(deprecation)); +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); /** * Error with extra properties to help with debugging */ @@ -14,13 +15,14 @@ export class RequestError extends Error { } this.name = "HttpError"; this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - }, - }); - this.headers = options.headers || {}; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options 
const requestCopy = Object.assign({}, options.request); if (options.request.headers.authorization) { @@ -36,5 +38,18 @@ export class RequestError extends Error { // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); } } diff --git a/node_modules/@octokit/request-error/dist-types/index.d.ts b/node_modules/@octokit/request-error/dist-types/index.d.ts index baa8a0eb..d6e089c9 100644 --- a/node_modules/@octokit/request-error/dist-types/index.d.ts +++ b/node_modules/@octokit/request-error/dist-types/index.d.ts @@ -1,4 +1,4 @@ -import { RequestOptions, ResponseHeaders } from "@octokit/types"; +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; import { RequestErrorOptions } from "./types"; /** * Error with extra properties to help with debugging @@ -15,13 +15,19 @@ export declare class RequestError extends Error { * @deprecated `error.code` is deprecated in favor of `error.status` */ code: number; + /** + * Request options that lead to the error. + */ + request: RequestOptions; /** * error response headers + * + * @deprecated `error.headers` is deprecated in favor of `error.response.headers` */ headers: ResponseHeaders; /** - * Request options that lead to the error. + * Response object if a response was received */ - request: RequestOptions; + response?: OctokitResponse; constructor(message: string, statusCode: number, options: RequestErrorOptions); } diff --git a/node_modules/@octokit/request-error/dist-types/types.d.ts b/node_modules/@octokit/request-error/dist-types/types.d.ts index 865d2139..7785231f 100644 --- a/node_modules/@octokit/request-error/dist-types/types.d.ts +++ b/node_modules/@octokit/request-error/dist-types/types.d.ts @@ -1,5 +1,9 @@ -import { RequestOptions, ResponseHeaders } from "@octokit/types"; +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; export declare type RequestErrorOptions = { + /** @deprecated set `response` instead */ headers?: ResponseHeaders; request: RequestOptions; +} | { + response: OctokitResponse; + request: RequestOptions; }; diff --git a/node_modules/@octokit/request-error/dist-web/index.js b/node_modules/@octokit/request-error/dist-web/index.js index feec58ef..0fb64be8 100644 --- a/node_modules/@octokit/request-error/dist-web/index.js +++ b/node_modules/@octokit/request-error/dist-web/index.js @@ -1,7 +1,8 @@ import { Deprecation } from 'deprecation'; import once from 'once'; -const logOnce = once((deprecation) => console.warn(deprecation)); +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); /** * Error with extra properties to help with debugging */ @@ -15,13 +16,14 @@ class RequestError extends Error { } this.name = "HttpError"; this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - }, - }); - this.headers = options.headers || {}; 
+ let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options const requestCopy = Object.assign({}, options.request); if (options.request.headers.authorization) { @@ -37,6 +39,19 @@ class RequestError extends Error { // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); } } diff --git a/node_modules/@octokit/request-error/dist-web/index.js.map b/node_modules/@octokit/request-error/dist-web/index.js.map index 130740d7..78f677f4 100644 --- a/node_modules/@octokit/request-error/dist-web/index.js.map +++ b/node_modules/@octokit/request-error/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnce = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n this.headers = options.headers || {};\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n 
}\n}\n"],"names":[],"mappings":";;;AAEA,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACjE;AACA;AACA;AACO,MAAM,YAAY,SAAS,KAAK,CAAC;AACxC,IAAI,WAAW,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE;AAC9C,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC;AACvB;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,QAAQ,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AAChC,QAAQ,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;AACjC,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE;AAC5C,YAAY,GAAG,GAAG;AAClB,gBAAgB,OAAO,CAAC,IAAI,WAAW,CAAC,0EAA0E,CAAC,CAAC,CAAC;AACrH,gBAAgB,OAAO,UAAU,CAAC;AAClC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,QAAQ,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC;AAC7C;AACA,QAAQ,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/D,QAAQ,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,EAAE;AACnD,YAAY,WAAW,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE;AAC7E,gBAAgB,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,MAAM,EAAE,aAAa,CAAC;AACnG,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,WAAW,CAAC,GAAG,GAAG,WAAW,CAAC,GAAG;AACzC;AACA;AACA,aAAa,OAAO,CAAC,sBAAsB,EAAE,0BAA0B,CAAC;AACxE;AACA;AACA,aAAa,OAAO,CAAC,qBAAqB,EAAE,yBAAyB,CAAC,CAAC;AACvE,QAAQ,IAAI,CAAC,OAAO,GAAG,WAAW,CAAC;AACnC,KAAK;AACL;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use 
`error.response.headers`.\"));\n return headers || {};\n },\n });\n }\n}\n"],"names":[],"mappings":";;;AAEA,MAAM,WAAW,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACrE,MAAM,cAAc,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACxE;AACA;AACA;AACO,MAAM,YAAY,SAAS,KAAK,CAAC;AACxC,IAAI,WAAW,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE;AAC9C,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC;AACvB;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,QAAQ,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AAChC,QAAQ,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;AACjC,QAAQ,IAAI,OAAO,CAAC;AACpB,QAAQ,IAAI,SAAS,IAAI,OAAO,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,WAAW,EAAE;AAC5E,YAAY,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACtC,SAAS;AACT,QAAQ,IAAI,UAAU,IAAI,OAAO,EAAE;AACnC,YAAY,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AAC7C,YAAY,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC;AAC/C,SAAS;AACT;AACA,QAAQ,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/D,QAAQ,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,EAAE;AACnD,YAAY,WAAW,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE;AAC7E,gBAAgB,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,MAAM,EAAE,aAAa,CAAC;AACnG,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,WAAW,CAAC,GAAG,GAAG,WAAW,CAAC,GAAG;AACzC;AACA;AACA,aAAa,OAAO,CAAC,sBAAsB,EAAE,0BAA0B,CAAC;AACxE;AACA;AACA,aAAa,OAAO,CAAC,qBAAqB,EAAE,yBAAyB,CAAC,CAAC;AACvE,QAAQ,IAAI,CAAC,OAAO,GAAG,WAAW,CAAC;AACnC;AACA,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE;AAC5C,YAAY,GAAG,GAAG;AAClB,gBAAgB,WAAW,CAAC,IAAI,WAAW,CAAC,0EAA0E,CAAC,CAAC,CAAC;AACzH,gBAAgB,OAAO,UAAU,CAAC;AAClC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,EAAE;AAC/C,YAAY,GAAG,GAAG;AAClB,gBAAgB,cAAc,CAAC,IAAI,WAAW,CAAC,uFAAuF,CAAC,CAAC,CAAC;AACzI,gBAAgB,OAAO,OAAO,IAAI,EAAE,CAAC;AACrC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,KAAK;AACL;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request-error/package.json b/node_modules/@octokit/request-error/package.json index 4c41600e..2f5b2394 100644 --- a/node_modules/@octokit/request-error/package.json +++ b/node_modules/@octokit/request-error/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/request-error", "description": "Error class for Octokit request errors", - "version": "2.0.5", + "version": "2.1.0", "license": "MIT", "files": [ "dist-*/", @@ -15,14 +15,7 @@ "api", "error" ], - "homepage": "https://github.com/octokit/request-error.js#readme", - "bugs": { - "url": "https://github.com/octokit/request-error.js/issues" - }, - "repository": { - "type": "git", - "url": "https://github.com/octokit/request-error.js.git" - }, + "repository": "github:octokit/request-error.js", "dependencies": { "@octokit/types": "^6.0.3", "deprecation": "^2.0.0", @@ -37,11 +30,11 @@ "@types/jest": "^26.0.0", "@types/node": "^14.0.4", "@types/once": "^1.4.0", - "jest": "^26.0.0", + "jest": "^27.0.0", "pika-plugin-unpkg-field": "^1.1.0", - "prettier": "^2.0.1", + "prettier": "2.3.1", "semantic-release": "^17.0.0", - "ts-jest": "^26.0.0", + "ts-jest": "^27.0.0-next.12", "typescript": "^4.0.0" }, "publishConfig": { diff --git a/node_modules/@octokit/request/README.md b/node_modules/@octokit/request/README.md index 514eb6e2..e9487974 100644 --- a/node_modules/@octokit/request/README.md +++ b/node_modules/@octokit/request/README.md @@ -343,6 +343,16 @@ const { data: app } = await requestWithAuth( Use an AbortController instance to cancel a request. In node you can only cancel streamed requests.
+ options.request.log + + object + + Used for internal logging. Defaults to console. +
@@ -406,8 +416,8 @@ All other options except `options.request.*` will be passed depending on the `me If an error occurs, the `error` instance has additional properties to help with debugging - `error.status` The http response status code -- `error.headers` The http response headers as an object - `error.request` The request options such as `method`, `url` and `data` +- `error.response` The http response object with `url`, `headers`, and `data` ## `request.defaults()` diff --git a/node_modules/@octokit/request/dist-node/index.js b/node_modules/@octokit/request/dist-node/index.js index e175f9eb..e3a812e6 100644 --- a/node_modules/@octokit/request/dist-node/index.js +++ b/node_modules/@octokit/request/dist-node/index.js @@ -10,13 +10,15 @@ var isPlainObject = require('is-plain-object'); var nodeFetch = _interopDefault(require('node-fetch')); var requestError = require('@octokit/request-error'); -const VERSION = "5.4.15"; +const VERSION = "5.6.0"; function getBufferResponse(response) { return response.arrayBuffer(); } function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } @@ -32,7 +34,7 @@ function fetchWrapper(requestOptions) { redirect: requestOptions.redirect }, // `requestOptions.request.agent` type is incompatible // see https://github.com/octokit/types.ts/pull/264 - requestOptions.request)).then(response => { + requestOptions.request)).then(async response => { url = response.url; status = response.status; @@ -40,6 +42,12 @@ function fetchWrapper(requestOptions) { headers[keyAndValue[0]] = keyAndValue[1]; } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}`); + } + if (status === 204 || status === 205) { return; } // GitHub API returns 200 for HEAD requests @@ -51,49 +59,43 @@ function fetchWrapper(requestOptions) { } throw new requestError.RequestError(response.statusText, status, { - headers, + response: { + url, + status, + headers, + data: undefined + }, request: requestOptions }); } if (status === 304) { throw new requestError.RequestError("Not modified", status, { - headers, + response: { + url, + status, + headers, + data: await getResponseData(response) + }, request: requestOptions }); } if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { + const data = await getResponseData(response); + const error = new requestError.RequestError(toErrorMessage(data), status, { + response: { + url, + status, headers, - request: requestOptions - }); - - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format - - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } - - throw error; + data + }, + request: requestOptions }); + throw error; } - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); + return getResponseData(response); }).then(data => { return { status, @@ -102,17 +104,42 @@ function fetchWrapper(requestOptions) { data }; }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; - } - + if (error instanceof requestError.RequestError) throw error; throw new requestError.RequestError(error.message, 500, { - headers, request: requestOptions }); }); } +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); +} + +function toErrorMessage(data) { + if (typeof data === "string") return data; // istanbul ignore else - just in case + + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + + return data.message; + } // istanbul ignore next - just in case + + + return `Unknown error: ${JSON.stringify(data)}`; +} + function withDefaults(oldEndpoint, newDefaults) { const endpoint = oldEndpoint.defaults(newDefaults); diff --git a/node_modules/@octokit/request/dist-node/index.js.map b/node_modules/@octokit/request/dist-node/index.js.map index e1c22d5d..6955ce79 100644 --- a/node_modules/@octokit/request/dist-node/index.js.map +++ b/node_modules/@octokit/request/dist-node/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/get-buffer-response.js","../dist-src/fetch-wrapper.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.4.15\";\n","export default function getBufferResponse(response) {\n return response.arrayBuffer();\n}\n","import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch from \"node-fetch\";\nimport { RequestError } from 
\"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nexport default function fetchWrapper(requestOptions) {\n if (isPlainObject(requestOptions.body) ||\n Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n }, \n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request))\n .then((response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n headers,\n request: requestOptions,\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions,\n });\n }\n if (status >= 400) {\n return response\n .text()\n .then((message) => {\n const error = new RequestError(message, status, {\n headers,\n request: requestOptions,\n });\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors;\n // Assumption `errors` would always be in Array format\n error.message =\n error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n }\n catch (e) {\n // ignore, see octokit/rest.js#684\n }\n throw error;\n });\n }\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n })\n .then((data) => {\n return {\n status,\n url,\n headers,\n data,\n };\n })\n .catch((error) => {\n if (error instanceof RequestError) {\n throw error;\n }\n throw new RequestError(error.message, 500, {\n headers,\n request: requestOptions,\n });\n });\n}\n","import fetchWrapper from \"./fetch-wrapper\";\nexport default function withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n}\n","import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nexport const request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`,\n 
},\n});\n"],"names":["VERSION","getBufferResponse","response","arrayBuffer","fetchWrapper","requestOptions","isPlainObject","body","Array","isArray","JSON","stringify","headers","status","url","fetch","request","nodeFetch","Object","assign","method","redirect","then","keyAndValue","RequestError","statusText","text","message","error","responseBody","parse","errors","map","join","e","contentType","get","test","json","getBuffer","data","catch","withDefaults","oldEndpoint","newDefaults","endpoint","defaults","newApi","route","parameters","endpointOptions","merge","hook","bind","getUserAgent"],"mappings":";;;;;;;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACAQ,SAASC,iBAAT,CAA2BC,QAA3B,EAAqC;AAChD,SAAOA,QAAQ,CAACC,WAAT,EAAP;AACH;;ACEc,SAASC,YAAT,CAAsBC,cAAtB,EAAsC;AACjD,MAAIC,2BAAa,CAACD,cAAc,CAACE,IAAhB,CAAb,IACAC,KAAK,CAACC,OAAN,CAAcJ,cAAc,CAACE,IAA7B,CADJ,EACwC;AACpCF,IAAAA,cAAc,CAACE,IAAf,GAAsBG,IAAI,CAACC,SAAL,CAAeN,cAAc,CAACE,IAA9B,CAAtB;AACH;;AACD,MAAIK,OAAO,GAAG,EAAd;AACA,MAAIC,MAAJ;AACA,MAAIC,GAAJ;AACA,QAAMC,KAAK,GAAIV,cAAc,CAACW,OAAf,IAA0BX,cAAc,CAACW,OAAf,CAAuBD,KAAlD,IAA4DE,SAA1E;AACA,SAAOF,KAAK,CAACV,cAAc,CAACS,GAAhB,EAAqBI,MAAM,CAACC,MAAP,CAAc;AAC3CC,IAAAA,MAAM,EAAEf,cAAc,CAACe,MADoB;AAE3Cb,IAAAA,IAAI,EAAEF,cAAc,CAACE,IAFsB;AAG3CK,IAAAA,OAAO,EAAEP,cAAc,CAACO,OAHmB;AAI3CS,IAAAA,QAAQ,EAAEhB,cAAc,CAACgB;AAJkB,GAAd;AAOjC;AACAhB,EAAAA,cAAc,CAACW,OARkB,CAArB,CAAL,CASFM,IATE,CASIpB,QAAD,IAAc;AACpBY,IAAAA,GAAG,GAAGZ,QAAQ,CAACY,GAAf;AACAD,IAAAA,MAAM,GAAGX,QAAQ,CAACW,MAAlB;;AACA,SAAK,MAAMU,WAAX,IAA0BrB,QAAQ,CAACU,OAAnC,EAA4C;AACxCA,MAAAA,OAAO,CAACW,WAAW,CAAC,CAAD,CAAZ,CAAP,GAA0BA,WAAW,CAAC,CAAD,CAArC;AACH;;AACD,QAAIV,MAAM,KAAK,GAAX,IAAkBA,MAAM,KAAK,GAAjC,EAAsC;AAClC;AACH,KARmB;;;AAUpB,QAAIR,cAAc,CAACe,MAAf,KAA0B,MAA9B,EAAsC;AAClC,UAAIP,MAAM,GAAG,GAAb,EAAkB;AACd;AACH;;AACD,YAAM,IAAIW,yBAAJ,CAAiBtB,QAAQ,CAACuB,UAA1B,EAAsCZ,MAAtC,EAA8C;AAChDD,QAAAA,OADgD;AAEhDI,QAAAA,OAAO,EAAEX;AAFuC,OAA9C,CAAN;AAIH;;AACD,QAAIQ,MAAM,KAAK,GAAf,EAAoB;AAChB,YAAM,IAAIW,yBAAJ,CAAiB,cAAjB,EAAiCX,MAAjC,EAAyC;AAC3CD,QAAAA,OAD2C;AAE3CI,QAAAA,OAAO,EAAEX;AAFkC,OAAzC,CAAN;AAIH;;AACD,QAAIQ,MAAM,IAAI,GAAd,EAAmB;AACf,aAAOX,QAAQ,CACVwB,IADE,GAEFJ,IAFE,CAEIK,OAAD,IAAa;AACnB,cAAMC,KAAK,GAAG,IAAIJ,yBAAJ,CAAiBG,OAAjB,EAA0Bd,MAA1B,EAAkC;AAC5CD,UAAAA,OAD4C;AAE5CI,UAAAA,OAAO,EAAEX;AAFmC,SAAlC,CAAd;;AAIA,YAAI;AACA,cAAIwB,YAAY,GAAGnB,IAAI,CAACoB,KAAL,CAAWF,KAAK,CAACD,OAAjB,CAAnB;AACAT,UAAAA,MAAM,CAACC,MAAP,CAAcS,KAAd,EAAqBC,YAArB;AACA,cAAIE,MAAM,GAAGF,YAAY,CAACE,MAA1B,CAHA;;AAKAH,UAAAA,KAAK,CAACD,OAAN,GACIC,KAAK,CAACD,OAAN,GAAgB,IAAhB,GAAuBI,MAAM,CAACC,GAAP,CAAWtB,IAAI,CAACC,SAAhB,EAA2BsB,IAA3B,CAAgC,IAAhC,CAD3B;AAEH,SAPD,CAQA,OAAOC,CAAP,EAAU;AAET;;AACD,cAAMN,KAAN;AACH,OAnBM,CAAP;AAoBH;;AACD,UAAMO,WAAW,GAAGjC,QAAQ,CAACU,OAAT,CAAiBwB,GAAjB,CAAqB,cAArB,CAApB;;AACA,QAAI,oBAAoBC,IAApB,CAAyBF,WAAzB,CAAJ,EAA2C;AACvC,aAAOjC,QAAQ,CAACoC,IAAT,EAAP;AACH;;AACD,QAAI,CAACH,WAAD,IAAgB,yBAAyBE,IAAzB,CAA8BF,WAA9B,CAApB,EAAgE;AAC5D,aAAOjC,QAAQ,CAACwB,IAAT,EAAP;AACH;;AACD,WAAOa,iBAAS,CAACrC,QAAD,CAAhB;AACH,GAhEM,EAiEFoB,IAjEE,CAiEIkB,IAAD,IAAU;AAChB,WAAO;AACH3B,MAAAA,MADG;AAEHC,MAAAA,GAFG;AAGHF,MAAAA,OAHG;AAIH4B,MAAAA;AAJG,KAAP;AAMH,GAxEM,EAyEFC,KAzEE,CAyEKb,KAAD,IAAW;AAClB,QAAIA,KAAK,YAAYJ,yBAArB,EAAmC;AAC/B,YAAMI,KAAN;AACH;;AACD,UAAM,IAAIJ,yBAAJ,CAAiBI,KAAK,CAACD,OAAvB,EAAgC,GAAhC,EAAqC;AACvCf,MAAAA,OADuC;AAEvCI,MAAAA,OAAO,EAAEX;AAF8B,KAArC,CAAN;AAIH,GAjFM,CAAP;AAkFH;;AC9Fc,SAASqC,YAAT,CAAsBC,WAAtB,EAAmCC,WAAnC,EAAgD;AAC3D,QAAMC,QAAQ,GAAGF,WAAW,CAACG,QAAZ,CAAqBF,WAArB,CAAjB;;AACA,QAAMG,MAAM,GAAG,UAAUC,KAAV,EAAiBC,UAAjB,EAA6B;AACxC,UAAMC,eAAe,GAAGL,QAAQ,CAACM,KAA
T,CAAeH,KAAf,EAAsBC,UAAtB,CAAxB;;AACA,QAAI,CAACC,eAAe,CAAClC,OAAjB,IAA4B,CAACkC,eAAe,CAAClC,OAAhB,CAAwBoC,IAAzD,EAA+D;AAC3D,aAAOhD,YAAY,CAACyC,QAAQ,CAACf,KAAT,CAAeoB,eAAf,CAAD,CAAnB;AACH;;AACD,UAAMlC,OAAO,GAAG,CAACgC,KAAD,EAAQC,UAAR,KAAuB;AACnC,aAAO7C,YAAY,CAACyC,QAAQ,CAACf,KAAT,CAAee,QAAQ,CAACM,KAAT,CAAeH,KAAf,EAAsBC,UAAtB,CAAf,CAAD,CAAnB;AACH,KAFD;;AAGA/B,IAAAA,MAAM,CAACC,MAAP,CAAcH,OAAd,EAAuB;AACnB6B,MAAAA,QADmB;AAEnBC,MAAAA,QAAQ,EAAEJ,YAAY,CAACW,IAAb,CAAkB,IAAlB,EAAwBR,QAAxB;AAFS,KAAvB;AAIA,WAAOK,eAAe,CAAClC,OAAhB,CAAwBoC,IAAxB,CAA6BpC,OAA7B,EAAsCkC,eAAtC,CAAP;AACH,GAbD;;AAcA,SAAOhC,MAAM,CAACC,MAAP,CAAc4B,MAAd,EAAsB;AACzBF,IAAAA,QADyB;AAEzBC,IAAAA,QAAQ,EAAEJ,YAAY,CAACW,IAAb,CAAkB,IAAlB,EAAwBR,QAAxB;AAFe,GAAtB,CAAP;AAIH;;MCjBY7B,OAAO,GAAG0B,YAAY,CAACG,iBAAD,EAAW;AAC1CjC,EAAAA,OAAO,EAAE;AACL,kBAAe,sBAAqBZ,OAAQ,IAAGsD,+BAAY,EAAG;AADzD;AADiC,CAAX,CAA5B;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/get-buffer-response.js","../dist-src/fetch-wrapper.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.6.0\";\n","export default function getBufferResponse(response) {\n return response.arrayBuffer();\n}\n","import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch from \"node-fetch\";\nimport { RequestError } from \"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nexport default function fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log\n ? requestOptions.request.log\n : console;\n if (isPlainObject(requestOptions.body) ||\n Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n }, \n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request))\n .then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : \"\"}`);\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined,\n },\n request: requestOptions,\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response),\n },\n request: requestOptions,\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data,\n },\n request: requestOptions,\n });\n throw error;\n }\n return getResponseData(response);\n })\n .then((data) => {\n return {\n status,\n url,\n headers,\n data,\n };\n })\n .catch((error) => {\n if (error instanceof RequestError)\n throw error;\n throw new RequestError(error.message, 500, {\n request: requestOptions,\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n // istanbul ignore else - just in case\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n // istanbul ignore next - just in case\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n","import fetchWrapper from \"./fetch-wrapper\";\nexport default function withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n}\n","import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nexport const request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`,\n 
},\n});\n"],"names":["VERSION","getBufferResponse","response","arrayBuffer","fetchWrapper","requestOptions","log","request","console","isPlainObject","body","Array","isArray","JSON","stringify","headers","status","url","fetch","nodeFetch","Object","assign","method","redirect","then","keyAndValue","matches","link","match","deprecationLink","pop","warn","sunset","RequestError","statusText","data","undefined","getResponseData","error","toErrorMessage","catch","message","contentType","get","test","json","text","getBuffer","errors","map","join","withDefaults","oldEndpoint","newDefaults","endpoint","defaults","newApi","route","parameters","endpointOptions","merge","hook","parse","bind","getUserAgent"],"mappings":";;;;;;;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAhB;;ACAQ,SAASC,iBAAT,CAA2BC,QAA3B,EAAqC;AAChD,SAAOA,QAAQ,CAACC,WAAT,EAAP;AACH;;ACEc,SAASC,YAAT,CAAsBC,cAAtB,EAAsC;AACjD,QAAMC,GAAG,GAAGD,cAAc,CAACE,OAAf,IAA0BF,cAAc,CAACE,OAAf,CAAuBD,GAAjD,GACND,cAAc,CAACE,OAAf,CAAuBD,GADjB,GAENE,OAFN;;AAGA,MAAIC,2BAAa,CAACJ,cAAc,CAACK,IAAhB,CAAb,IACAC,KAAK,CAACC,OAAN,CAAcP,cAAc,CAACK,IAA7B,CADJ,EACwC;AACpCL,IAAAA,cAAc,CAACK,IAAf,GAAsBG,IAAI,CAACC,SAAL,CAAeT,cAAc,CAACK,IAA9B,CAAtB;AACH;;AACD,MAAIK,OAAO,GAAG,EAAd;AACA,MAAIC,MAAJ;AACA,MAAIC,GAAJ;AACA,QAAMC,KAAK,GAAIb,cAAc,CAACE,OAAf,IAA0BF,cAAc,CAACE,OAAf,CAAuBW,KAAlD,IAA4DC,SAA1E;AACA,SAAOD,KAAK,CAACb,cAAc,CAACY,GAAhB,EAAqBG,MAAM,CAACC,MAAP,CAAc;AAC3CC,IAAAA,MAAM,EAAEjB,cAAc,CAACiB,MADoB;AAE3CZ,IAAAA,IAAI,EAAEL,cAAc,CAACK,IAFsB;AAG3CK,IAAAA,OAAO,EAAEV,cAAc,CAACU,OAHmB;AAI3CQ,IAAAA,QAAQ,EAAElB,cAAc,CAACkB;AAJkB,GAAd;AAOjC;AACAlB,EAAAA,cAAc,CAACE,OARkB,CAArB,CAAL,CASFiB,IATE,CASG,MAAOtB,QAAP,IAAoB;AAC1Be,IAAAA,GAAG,GAAGf,QAAQ,CAACe,GAAf;AACAD,IAAAA,MAAM,GAAGd,QAAQ,CAACc,MAAlB;;AACA,SAAK,MAAMS,WAAX,IAA0BvB,QAAQ,CAACa,OAAnC,EAA4C;AACxCA,MAAAA,OAAO,CAACU,WAAW,CAAC,CAAD,CAAZ,CAAP,GAA0BA,WAAW,CAAC,CAAD,CAArC;AACH;;AACD,QAAI,iBAAiBV,OAArB,EAA8B;AAC1B,YAAMW,OAAO,GAAGX,OAAO,CAACY,IAAR,IAAgBZ,OAAO,CAACY,IAAR,CAAaC,KAAb,CAAmB,8BAAnB,CAAhC;AACA,YAAMC,eAAe,GAAGH,OAAO,IAAIA,OAAO,CAACI,GAAR,EAAnC;AACAxB,MAAAA,GAAG,CAACyB,IAAJ,CAAU,uBAAsB1B,cAAc,CAACiB,MAAO,IAAGjB,cAAc,CAACY,GAAI,qDAAoDF,OAAO,CAACiB,MAAO,GAAEH,eAAe,GAAI,SAAQA,eAAgB,EAA5B,GAAgC,EAAG,EAAnM;AACH;;AACD,QAAIb,MAAM,KAAK,GAAX,IAAkBA,MAAM,KAAK,GAAjC,EAAsC;AAClC;AACH,KAbyB;;;AAe1B,QAAIX,cAAc,CAACiB,MAAf,KAA0B,MAA9B,EAAsC;AAClC,UAAIN,MAAM,GAAG,GAAb,EAAkB;AACd;AACH;;AACD,YAAM,IAAIiB,yBAAJ,CAAiB/B,QAAQ,CAACgC,UAA1B,EAAsClB,MAAtC,EAA8C;AAChDd,QAAAA,QAAQ,EAAE;AACNe,UAAAA,GADM;AAEND,UAAAA,MAFM;AAGND,UAAAA,OAHM;AAINoB,UAAAA,IAAI,EAAEC;AAJA,SADsC;AAOhD7B,QAAAA,OAAO,EAAEF;AAPuC,OAA9C,CAAN;AASH;;AACD,QAAIW,MAAM,KAAK,GAAf,EAAoB;AAChB,YAAM,IAAIiB,yBAAJ,CAAiB,cAAjB,EAAiCjB,MAAjC,EAAyC;AAC3Cd,QAAAA,QAAQ,EAAE;AACNe,UAAAA,GADM;AAEND,UAAAA,MAFM;AAGND,UAAAA,OAHM;AAINoB,UAAAA,IAAI,EAAE,MAAME,eAAe,CAACnC,QAAD;AAJrB,SADiC;AAO3CK,QAAAA,OAAO,EAAEF;AAPkC,OAAzC,CAAN;AASH;;AACD,QAAIW,MAAM,IAAI,GAAd,EAAmB;AACf,YAAMmB,IAAI,GAAG,MAAME,eAAe,CAACnC,QAAD,CAAlC;AACA,YAAMoC,KAAK,GAAG,IAAIL,yBAAJ,CAAiBM,cAAc,CAACJ,IAAD,CAA/B,EAAuCnB,MAAvC,EAA+C;AACzDd,QAAAA,QAAQ,EAAE;AACNe,UAAAA,GADM;AAEND,UAAAA,MAFM;AAGND,UAAAA,OAHM;AAINoB,UAAAA;AAJM,SAD+C;AAOzD5B,QAAAA,OAAO,EAAEF;AAPgD,OAA/C,CAAd;AASA,YAAMiC,KAAN;AACH;;AACD,WAAOD,eAAe,CAACnC,QAAD,CAAtB;AACH,GA/DM,EAgEFsB,IAhEE,CAgEIW,IAAD,IAAU;AAChB,WAAO;AACHnB,MAAAA,MADG;AAEHC,MAAAA,GAFG;AAGHF,MAAAA,OAHG;AAIHoB,MAAAA;AAJG,KAAP;AAMH,GAvEM,EAwEFK,KAxEE,CAwEKF,KAAD,IAAW;AAClB,QAAIA,KAAK,YAAYL,yBAArB,EACI,MAAMK,KAAN;AACJ,UAAM,IAAIL,yBAAJ,CAAiBK,KAAK,CAACG,OAAvB,EAAgC,GAAhC,EAAqC;AACvClC,MAAAA,OAAO,EAAEF;AAD8B,KAArC,CAAN;AAGH,GA9E
M,CAAP;AA+EH;;AACD,eAAegC,eAAf,CAA+BnC,QAA/B,EAAyC;AACrC,QAAMwC,WAAW,GAAGxC,QAAQ,CAACa,OAAT,CAAiB4B,GAAjB,CAAqB,cAArB,CAApB;;AACA,MAAI,oBAAoBC,IAApB,CAAyBF,WAAzB,CAAJ,EAA2C;AACvC,WAAOxC,QAAQ,CAAC2C,IAAT,EAAP;AACH;;AACD,MAAI,CAACH,WAAD,IAAgB,yBAAyBE,IAAzB,CAA8BF,WAA9B,CAApB,EAAgE;AAC5D,WAAOxC,QAAQ,CAAC4C,IAAT,EAAP;AACH;;AACD,SAAOC,iBAAS,CAAC7C,QAAD,CAAhB;AACH;;AACD,SAASqC,cAAT,CAAwBJ,IAAxB,EAA8B;AAC1B,MAAI,OAAOA,IAAP,KAAgB,QAApB,EACI,OAAOA,IAAP,CAFsB;;AAI1B,MAAI,aAAaA,IAAjB,EAAuB;AACnB,QAAIxB,KAAK,CAACC,OAAN,CAAcuB,IAAI,CAACa,MAAnB,CAAJ,EAAgC;AAC5B,aAAQ,GAAEb,IAAI,CAACM,OAAQ,KAAIN,IAAI,CAACa,MAAL,CAAYC,GAAZ,CAAgBpC,IAAI,CAACC,SAArB,EAAgCoC,IAAhC,CAAqC,IAArC,CAA2C,EAAtE;AACH;;AACD,WAAOf,IAAI,CAACM,OAAZ;AACH,GATyB;;;AAW1B,SAAQ,kBAAiB5B,IAAI,CAACC,SAAL,CAAeqB,IAAf,CAAqB,EAA9C;AACH;;ACrHc,SAASgB,YAAT,CAAsBC,WAAtB,EAAmCC,WAAnC,EAAgD;AAC3D,QAAMC,QAAQ,GAAGF,WAAW,CAACG,QAAZ,CAAqBF,WAArB,CAAjB;;AACA,QAAMG,MAAM,GAAG,UAAUC,KAAV,EAAiBC,UAAjB,EAA6B;AACxC,UAAMC,eAAe,GAAGL,QAAQ,CAACM,KAAT,CAAeH,KAAf,EAAsBC,UAAtB,CAAxB;;AACA,QAAI,CAACC,eAAe,CAACpD,OAAjB,IAA4B,CAACoD,eAAe,CAACpD,OAAhB,CAAwBsD,IAAzD,EAA+D;AAC3D,aAAOzD,YAAY,CAACkD,QAAQ,CAACQ,KAAT,CAAeH,eAAf,CAAD,CAAnB;AACH;;AACD,UAAMpD,OAAO,GAAG,CAACkD,KAAD,EAAQC,UAAR,KAAuB;AACnC,aAAOtD,YAAY,CAACkD,QAAQ,CAACQ,KAAT,CAAeR,QAAQ,CAACM,KAAT,CAAeH,KAAf,EAAsBC,UAAtB,CAAf,CAAD,CAAnB;AACH,KAFD;;AAGAtC,IAAAA,MAAM,CAACC,MAAP,CAAcd,OAAd,EAAuB;AACnB+C,MAAAA,QADmB;AAEnBC,MAAAA,QAAQ,EAAEJ,YAAY,CAACY,IAAb,CAAkB,IAAlB,EAAwBT,QAAxB;AAFS,KAAvB;AAIA,WAAOK,eAAe,CAACpD,OAAhB,CAAwBsD,IAAxB,CAA6BtD,OAA7B,EAAsCoD,eAAtC,CAAP;AACH,GAbD;;AAcA,SAAOvC,MAAM,CAACC,MAAP,CAAcmC,MAAd,EAAsB;AACzBF,IAAAA,QADyB;AAEzBC,IAAAA,QAAQ,EAAEJ,YAAY,CAACY,IAAb,CAAkB,IAAlB,EAAwBT,QAAxB;AAFe,GAAtB,CAAP;AAIH;;MCjBY/C,OAAO,GAAG4C,YAAY,CAACG,iBAAD,EAAW;AAC1CvC,EAAAA,OAAO,EAAE;AACL,kBAAe,sBAAqBf,OAAQ,IAAGgE,+BAAY,EAAG;AADzD;AADiC,CAAX,CAA5B;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request/dist-src/fetch-wrapper.js b/node_modules/@octokit/request/dist-src/fetch-wrapper.js index 0746ce6e..79653c42 100644 --- a/node_modules/@octokit/request/dist-src/fetch-wrapper.js +++ b/node_modules/@octokit/request/dist-src/fetch-wrapper.js @@ -3,6 +3,9 @@ import nodeFetch from "node-fetch"; import { RequestError } from "@octokit/request-error"; import getBuffer from "./get-buffer-response"; export default function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log + ? requestOptions.request.log + : console; if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); @@ -20,12 +23,17 @@ export default function fetchWrapper(requestOptions) { // `requestOptions.request.agent` type is incompatible // see https://github.com/octokit/types.ts/pull/264 requestOptions.request)) - .then((response) => { + .then(async (response) => { url = response.url; status = response.status; for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}`); + } if (status === 204 || status === 205) { return; } @@ -35,46 +43,40 @@ export default function fetchWrapper(requestOptions) { return; } throw new RequestError(response.statusText, status, { - headers, + response: { + url, + status, + headers, + data: undefined, + }, request: requestOptions, }); } if (status === 304) { throw new RequestError("Not modified", status, { - headers, + response: { + url, + status, + headers, + data: await getResponseData(response), + }, request: requestOptions, }); } if (status >= 400) { - return response - .text() - .then((message) => { - const error = new RequestError(message, status, { + const data = await getResponseData(response); + const error = new RequestError(toErrorMessage(data), status, { + response: { + url, + status, headers, - request: requestOptions, - }); - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; - // Assumption `errors` would always be in Array format - error.message = - error.message + ": " + errors.map(JSON.stringify).join(", "); - } - catch (e) { - // ignore, see octokit/rest.js#684 - } - throw error; + data, + }, + request: requestOptions, }); + throw error; } - const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json(); - } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - return getBuffer(response); + return getResponseData(response); }) .then((data) => { return { @@ -85,12 +87,33 @@ export default function fetchWrapper(requestOptions) { }; }) .catch((error) => { - if (error instanceof RequestError) { + if (error instanceof RequestError) throw error; - } throw new RequestError(error.message, 500, { - headers, request: requestOptions, }); }); } +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json(); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBuffer(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + // istanbul ignore else - just in case + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + // istanbul ignore next - just in case + return `Unknown error: ${JSON.stringify(data)}`; +} diff --git a/node_modules/@octokit/request/dist-src/version.js b/node_modules/@octokit/request/dist-src/version.js index 4e94e69d..54eb65b0 100644 --- a/node_modules/@octokit/request/dist-src/version.js +++ b/node_modules/@octokit/request/dist-src/version.js @@ -1 +1 @@ -export const VERSION = "5.4.15"; +export const VERSION = "5.6.0"; diff --git a/node_modules/@octokit/request/dist-types/version.d.ts b/node_modules/@octokit/request/dist-types/version.d.ts index 2ec648da..9c82454c 100644 --- a/node_modules/@octokit/request/dist-types/version.d.ts +++ b/node_modules/@octokit/request/dist-types/version.d.ts @@ -1 +1 @@ -export declare const VERSION = "5.4.15"; +export declare const VERSION = "5.6.0"; diff --git a/node_modules/@octokit/request/dist-web/index.js b/node_modules/@octokit/request/dist-web/index.js index 45c6e97c..4aaa7acf 100644 --- a/node_modules/@octokit/request/dist-web/index.js +++ b/node_modules/@octokit/request/dist-web/index.js @@ -4,13 +4,16 @@ import { 
isPlainObject } from 'is-plain-object'; import nodeFetch from 'node-fetch'; import { RequestError } from '@octokit/request-error'; -const VERSION = "5.4.15"; +const VERSION = "5.6.0"; function getBufferResponse(response) { return response.arrayBuffer(); } function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log + ? requestOptions.request.log + : console; if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); @@ -28,12 +31,17 @@ function fetchWrapper(requestOptions) { // `requestOptions.request.agent` type is incompatible // see https://github.com/octokit/types.ts/pull/264 requestOptions.request)) - .then((response) => { + .then(async (response) => { url = response.url; status = response.status; for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); + } if (status === 204 || status === 205) { return; } @@ -43,46 +51,40 @@ function fetchWrapper(requestOptions) { return; } throw new RequestError(response.statusText, status, { - headers, + response: { + url, + status, + headers, + data: undefined, + }, request: requestOptions, }); } if (status === 304) { throw new RequestError("Not modified", status, { - headers, + response: { + url, + status, + headers, + data: await getResponseData(response), + }, request: requestOptions, }); } if (status >= 400) { - return response - .text() - .then((message) => { - const error = new RequestError(message, status, { + const data = await getResponseData(response); + const error = new RequestError(toErrorMessage(data), status, { + response: { + url, + status, headers, - request: requestOptions, - }); - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; - // Assumption `errors` would always be in Array format - error.message = - error.message + ": " + errors.map(JSON.stringify).join(", "); - } - catch (e) { - // ignore, see octokit/rest.js#684 - } - throw error; + data, + }, + request: requestOptions, }); + throw error; } - const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json(); - } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - return getBufferResponse(response); + return getResponseData(response); }) .then((data) => { return { @@ -93,15 +95,36 @@ function fetchWrapper(requestOptions) { }; }) .catch((error) => { - if (error instanceof RequestError) { + if (error instanceof RequestError) throw error; - } throw new RequestError(error.message, 500, { - headers, request: requestOptions, }); }); } +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json(); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + // istanbul ignore else - just in case + 
if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + // istanbul ignore next - just in case + return `Unknown error: ${JSON.stringify(data)}`; +} function withDefaults(oldEndpoint, newDefaults) { const endpoint = oldEndpoint.defaults(newDefaults); diff --git a/node_modules/@octokit/request/dist-web/index.js.map b/node_modules/@octokit/request/dist-web/index.js.map index d3d24bb9..e39af15a 100644 --- a/node_modules/@octokit/request/dist-web/index.js.map +++ b/node_modules/@octokit/request/dist-web/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/get-buffer-response.js","../dist-src/fetch-wrapper.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.4.15\";\n","export default function getBufferResponse(response) {\n return response.arrayBuffer();\n}\n","import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch from \"node-fetch\";\nimport { RequestError } from \"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nexport default function fetchWrapper(requestOptions) {\n if (isPlainObject(requestOptions.body) ||\n Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n }, \n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request))\n .then((response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n headers,\n request: requestOptions,\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions,\n });\n }\n if (status >= 400) {\n return response\n .text()\n .then((message) => {\n const error = new RequestError(message, status, {\n headers,\n request: requestOptions,\n });\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors;\n // Assumption `errors` would always be in Array format\n error.message =\n error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n }\n catch (e) {\n // ignore, see octokit/rest.js#684\n }\n throw error;\n });\n }\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n })\n .then((data) => {\n return {\n status,\n url,\n headers,\n data,\n };\n })\n .catch((error) => {\n if (error instanceof RequestError) {\n throw error;\n }\n throw new RequestError(error.message, 500, {\n headers,\n request: requestOptions,\n });\n });\n}\n","import fetchWrapper from \"./fetch-wrapper\";\nexport default 
function withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n}\n","import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nexport const request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`,\n },\n});\n"],"names":["getBuffer"],"mappings":";;;;;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA3B,SAAS,iBAAiB,CAAC,QAAQ,EAAE;AACpD,IAAI,OAAO,QAAQ,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;;ACEc,SAAS,YAAY,CAAC,cAAc,EAAE;AACrD,IAAI,IAAI,aAAa,CAAC,cAAc,CAAC,IAAI,CAAC;AAC1C,QAAQ,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,EAAE;AAC5C,QAAQ,cAAc,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;AAClE,KAAK;AACL,IAAI,IAAI,OAAO,GAAG,EAAE,CAAC;AACrB,IAAI,IAAI,MAAM,CAAC;AACf,IAAI,IAAI,GAAG,CAAC;AACZ,IAAI,MAAM,KAAK,GAAG,CAAC,cAAc,CAAC,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,KAAK,KAAK,SAAS,CAAC;AACxF,IAAI,OAAO,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,CAAC;AACnD,QAAQ,MAAM,EAAE,cAAc,CAAC,MAAM;AACrC,QAAQ,IAAI,EAAE,cAAc,CAAC,IAAI;AACjC,QAAQ,OAAO,EAAE,cAAc,CAAC,OAAO;AACvC,QAAQ,QAAQ,EAAE,cAAc,CAAC,QAAQ;AACzC,KAAK;AACL;AACA;AACA,IAAI,cAAc,CAAC,OAAO,CAAC,CAAC;AAC5B,SAAS,IAAI,CAAC,CAAC,QAAQ,KAAK;AAC5B,QAAQ,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC;AAC3B,QAAQ,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;AACjC,QAAQ,KAAK,MAAM,WAAW,IAAI,QAAQ,CAAC,OAAO,EAAE;AACpD,YAAY,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;AACrD,SAAS;AACT,QAAQ,IAAI,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,EAAE;AAC9C,YAAY,OAAO;AACnB,SAAS;AACT;AACA,QAAQ,IAAI,cAAc,CAAC,MAAM,KAAK,MAAM,EAAE;AAC9C,YAAY,IAAI,MAAM,GAAG,GAAG,EAAE;AAC9B,gBAAgB,OAAO;AACvB,aAAa;AACb,YAAY,MAAM,IAAI,YAAY,CAAC,QAAQ,CAAC,UAAU,EAAE,MAAM,EAAE;AAChE,gBAAgB,OAAO;AACvB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,IAAI,MAAM,KAAK,GAAG,EAAE;AAC5B,YAAY,MAAM,IAAI,YAAY,CAAC,cAAc,EAAE,MAAM,EAAE;AAC3D,gBAAgB,OAAO;AACvB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,IAAI,MAAM,IAAI,GAAG,EAAE;AAC3B,YAAY,OAAO,QAAQ;AAC3B,iBAAiB,IAAI,EAAE;AACvB,iBAAiB,IAAI,CAAC,CAAC,OAAO,KAAK;AACnC,gBAAgB,MAAM,KAAK,GAAG,IAAI,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE;AAChE,oBAAoB,OAAO;AAC3B,oBAAoB,OAAO,EAAE,cAAc;AAC3C,iBAAiB,CAAC,CAAC;AACnB,gBAAgB,IAAI;AACpB,oBAAoB,IAAI,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;AACjE,oBAAoB,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;AACvD,oBAAoB,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;AACrD;AACA,oBAAoB,KAAK,CAAC,OAAO;AACjC,wBAAwB,KAAK,CAAC,OAAO,GAAG,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACrF,iBAAiB;AACjB,gBAAgB,OAAO,CAAC,EAAE;AAC1B;AACA,iBAAiB;AACjB,gBAAgB,MAAM,KAAK,CAAC;AAC5B,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;AACjE,QAAQ,IAAI,mBAAmB,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AACnD,YAAY,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;AACnC,SAAS;AACT,QAAQ,IAAI,CAAC,WAAW,IAAI,wBAAwB,
CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AACxE,YAAY,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;AACnC,SAAS;AACT,QAAQ,OAAOA,iBAAS,CAAC,QAAQ,CAAC,CAAC;AACnC,KAAK,CAAC;AACN,SAAS,IAAI,CAAC,CAAC,IAAI,KAAK;AACxB,QAAQ,OAAO;AACf,YAAY,MAAM;AAClB,YAAY,GAAG;AACf,YAAY,OAAO;AACnB,YAAY,IAAI;AAChB,SAAS,CAAC;AACV,KAAK,CAAC;AACN,SAAS,KAAK,CAAC,CAAC,KAAK,KAAK;AAC1B,QAAQ,IAAI,KAAK,YAAY,YAAY,EAAE;AAC3C,YAAY,MAAM,KAAK,CAAC;AACxB,SAAS;AACT,QAAQ,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,EAAE;AACnD,YAAY,OAAO;AACnB,YAAY,OAAO,EAAE,cAAc;AACnC,SAAS,CAAC,CAAC;AACX,KAAK,CAAC,CAAC;AACP,CAAC;;AC9Fc,SAAS,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE;AAC/D,IAAI,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;AACvD,IAAI,MAAM,MAAM,GAAG,UAAU,KAAK,EAAE,UAAU,EAAE;AAChD,QAAQ,MAAM,eAAe,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;AAClE,QAAQ,IAAI,CAAC,eAAe,CAAC,OAAO,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,IAAI,EAAE;AACvE,YAAY,OAAO,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC;AACjE,SAAS;AACT,QAAQ,MAAM,OAAO,GAAG,CAAC,KAAK,EAAE,UAAU,KAAK;AAC/C,YAAY,OAAO,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;AACnF,SAAS,CAAC;AACV,QAAQ,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;AAC/B,YAAY,QAAQ;AACpB,YAAY,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACvD,SAAS,CAAC,CAAC;AACX,QAAQ,OAAO,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;AACtE,KAAK,CAAC;AACN,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE;AACjC,QAAQ,QAAQ;AAChB,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACnD,KAAK,CAAC,CAAC;AACP,CAAC;;ACjBW,MAAC,OAAO,GAAG,YAAY,CAAC,QAAQ,EAAE;AAC9C,IAAI,OAAO,EAAE;AACb,QAAQ,YAAY,EAAE,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AACvE,KAAK;AACL,CAAC,CAAC;;;;"} \ No newline at end of file +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/get-buffer-response.js","../dist-src/fetch-wrapper.js","../dist-src/with-defaults.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.6.0\";\n","export default function getBufferResponse(response) {\n return response.arrayBuffer();\n}\n","import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch from \"node-fetch\";\nimport { RequestError } from \"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nexport default function fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log\n ? requestOptions.request.log\n : console;\n if (isPlainObject(requestOptions.body) ||\n Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = (requestOptions.request && requestOptions.request.fetch) || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n }, \n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request))\n .then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : \"\"}`);\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined,\n },\n request: requestOptions,\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response),\n },\n request: requestOptions,\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data,\n },\n request: requestOptions,\n });\n throw error;\n }\n return getResponseData(response);\n })\n .then((data) => {\n return {\n status,\n url,\n headers,\n data,\n };\n })\n .catch((error) => {\n if (error instanceof RequestError)\n throw error;\n throw new RequestError(error.message, 500, {\n request: requestOptions,\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n // istanbul ignore else - just in case\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n // istanbul ignore next - just in case\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n","import fetchWrapper from \"./fetch-wrapper\";\nexport default function withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint),\n });\n}\n","import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nexport const request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`,\n 
},\n});\n"],"names":["getBuffer"],"mappings":";;;;;;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA3B,SAAS,iBAAiB,CAAC,QAAQ,EAAE;AACpD,IAAI,OAAO,QAAQ,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;;ACEc,SAAS,YAAY,CAAC,cAAc,EAAE;AACrD,IAAI,MAAM,GAAG,GAAG,cAAc,CAAC,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,GAAG;AACpE,UAAU,cAAc,CAAC,OAAO,CAAC,GAAG;AACpC,UAAU,OAAO,CAAC;AAClB,IAAI,IAAI,aAAa,CAAC,cAAc,CAAC,IAAI,CAAC;AAC1C,QAAQ,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,EAAE;AAC5C,QAAQ,cAAc,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;AAClE,KAAK;AACL,IAAI,IAAI,OAAO,GAAG,EAAE,CAAC;AACrB,IAAI,IAAI,MAAM,CAAC;AACf,IAAI,IAAI,GAAG,CAAC;AACZ,IAAI,MAAM,KAAK,GAAG,CAAC,cAAc,CAAC,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,KAAK,KAAK,SAAS,CAAC;AACxF,IAAI,OAAO,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,CAAC;AACnD,QAAQ,MAAM,EAAE,cAAc,CAAC,MAAM;AACrC,QAAQ,IAAI,EAAE,cAAc,CAAC,IAAI;AACjC,QAAQ,OAAO,EAAE,cAAc,CAAC,OAAO;AACvC,QAAQ,QAAQ,EAAE,cAAc,CAAC,QAAQ;AACzC,KAAK;AACL;AACA;AACA,IAAI,cAAc,CAAC,OAAO,CAAC,CAAC;AAC5B,SAAS,IAAI,CAAC,OAAO,QAAQ,KAAK;AAClC,QAAQ,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC;AAC3B,QAAQ,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;AACjC,QAAQ,KAAK,MAAM,WAAW,IAAI,QAAQ,CAAC,OAAO,EAAE;AACpD,YAAY,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;AACrD,SAAS;AACT,QAAQ,IAAI,aAAa,IAAI,OAAO,EAAE;AACtC,YAAY,MAAM,OAAO,GAAG,OAAO,CAAC,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,8BAA8B,CAAC,CAAC;AAC/F,YAAY,MAAM,eAAe,GAAG,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC;AAC7D,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,oBAAoB,EAAE,cAAc,CAAC,MAAM,CAAC,CAAC,EAAE,cAAc,CAAC,GAAG,CAAC,kDAAkD,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,eAAe,GAAG,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AAClN,SAAS;AACT,QAAQ,IAAI,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,EAAE;AAC9C,YAAY,OAAO;AACnB,SAAS;AACT;AACA,QAAQ,IAAI,cAAc,CAAC,MAAM,KAAK,MAAM,EAAE;AAC9C,YAAY,IAAI,MAAM,GAAG,GAAG,EAAE;AAC9B,gBAAgB,OAAO;AACvB,aAAa;AACb,YAAY,MAAM,IAAI,YAAY,CAAC,QAAQ,CAAC,UAAU,EAAE,MAAM,EAAE;AAChE,gBAAgB,QAAQ,EAAE;AAC1B,oBAAoB,GAAG;AACvB,oBAAoB,MAAM;AAC1B,oBAAoB,OAAO;AAC3B,oBAAoB,IAAI,EAAE,SAAS;AACnC,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,IAAI,MAAM,KAAK,GAAG,EAAE;AAC5B,YAAY,MAAM,IAAI,YAAY,CAAC,cAAc,EAAE,MAAM,EAAE;AAC3D,gBAAgB,QAAQ,EAAE;AAC1B,oBAAoB,GAAG;AACvB,oBAAoB,MAAM;AAC1B,oBAAoB,OAAO;AAC3B,oBAAoB,IAAI,EAAE,MAAM,eAAe,CAAC,QAAQ,CAAC;AACzD,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,IAAI,MAAM,IAAI,GAAG,EAAE;AAC3B,YAAY,MAAM,IAAI,GAAG,MAAM,eAAe,CAAC,QAAQ,CAAC,CAAC;AACzD,YAAY,MAAM,KAAK,GAAG,IAAI,YAAY,CAAC,cAAc,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE;AACzE,gBAAgB,QAAQ,EAAE;AAC1B,oBAAoB,GAAG;AACvB,oBAAoB,MAAM;AAC1B,oBAAoB,OAAO;AAC3B,oBAAoB,IAAI;AACxB,iBAAiB;AACjB,gBAAgB,OAAO,EAAE,cAAc;AACvC,aAAa,CAAC,CAAC;AACf,YAAY,MAAM,KAAK,CAAC;AACxB,SAAS;AACT,QAAQ,OAAO,eAAe,CAAC,QAAQ,CAAC,CAAC;AACzC,KAAK,CAAC;AACN,SAAS,IAAI,CAAC,CAAC,IAAI,KAAK;AACxB,QAAQ,OAAO;AACf,YAAY,MAAM;AAClB,YAAY,GAAG;AACf,YAAY,OAAO;AACnB,YAAY,IAAI;AAChB,SAAS,CAAC;AACV,KAAK,CAAC;AACN,SAAS,KAAK,CAAC,CAAC,KAAK,KAAK;AAC1B,QAAQ,IAAI,KAAK,YAAY,YAAY;AACzC,YAAY,MAAM,KAAK,CAAC;AACxB,QAAQ,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,EAAE;AACnD,YAAY,OAAO,EAAE,cAAc;AACnC,SAAS,CAAC,CAAC;AACX,KAAK,CAAC,CAAC;AACP,CAAC;AACD,eAAe,eAAe,CAAC,QAAQ,EAAE;AACzC,IAAI,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;AAC7D,IAAI,IAAI,mBAAmB,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AAC/C,QAAQ,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;AAC/B,KAAK;AACL,IAAI,IAAI,CAAC,WAAW,IAAI,wBAAwB,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AACpE,QAAQ,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;AAC/B,KAAK;AACL,IAAI,OAAOA,iBAAS,CAAC,QAAQ,CAAC,CAAC;AAC/B,CAAC;AACD,SAAS,cAAc,CAAC,IAAI,EAAE;AAC9B,IAAI,IAAI,OAAO,IAA
I,KAAK,QAAQ;AAChC,QAAQ,OAAO,IAAI,CAAC;AACpB;AACA,IAAI,IAAI,SAAS,IAAI,IAAI,EAAE;AAC3B,QAAQ,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;AACxC,YAAY,OAAO,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACpF,SAAS;AACT,QAAQ,OAAO,IAAI,CAAC,OAAO,CAAC;AAC5B,KAAK;AACL;AACA,IAAI,OAAO,CAAC,eAAe,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACpD,CAAC;;ACrHc,SAAS,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE;AAC/D,IAAI,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;AACvD,IAAI,MAAM,MAAM,GAAG,UAAU,KAAK,EAAE,UAAU,EAAE;AAChD,QAAQ,MAAM,eAAe,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;AAClE,QAAQ,IAAI,CAAC,eAAe,CAAC,OAAO,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,IAAI,EAAE;AACvE,YAAY,OAAO,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC;AACjE,SAAS;AACT,QAAQ,MAAM,OAAO,GAAG,CAAC,KAAK,EAAE,UAAU,KAAK;AAC/C,YAAY,OAAO,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;AACnF,SAAS,CAAC;AACV,QAAQ,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;AAC/B,YAAY,QAAQ;AACpB,YAAY,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACvD,SAAS,CAAC,CAAC;AACX,QAAQ,OAAO,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;AACtE,KAAK,CAAC;AACN,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE;AACjC,QAAQ,QAAQ;AAChB,QAAQ,QAAQ,EAAE,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC;AACnD,KAAK,CAAC,CAAC;AACP,CAAC;;ACjBW,MAAC,OAAO,GAAG,YAAY,CAAC,QAAQ,EAAE;AAC9C,IAAI,OAAO,EAAE;AACb,QAAQ,YAAY,EAAE,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,CAAC;AACvE,KAAK;AACL,CAAC,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request/node_modules/is-plain-object/LICENSE b/node_modules/@octokit/request/node_modules/is-plain-object/LICENSE deleted file mode 100644 index 3f2eca18..00000000 --- a/node_modules/@octokit/request/node_modules/is-plain-object/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/@octokit/request/node_modules/is-plain-object/README.md b/node_modules/@octokit/request/node_modules/is-plain-object/README.md deleted file mode 100644 index 5c074ab0..00000000 --- a/node_modules/@octokit/request/node_modules/is-plain-object/README.md +++ /dev/null @@ -1,125 +0,0 @@ -# is-plain-object [![NPM version](https://img.shields.io/npm/v/is-plain-object.svg?style=flat)](https://www.npmjs.com/package/is-plain-object) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![NPM total downloads](https://img.shields.io/npm/dt/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-plain-object.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-plain-object) - -> Returns true if an object was created by the `Object` constructor, or Object.create(null). - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-plain-object -``` - -Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null. - -## Usage - -with es modules -```js -import { isPlainObject } from 'is-plain-object'; -``` - -or with commonjs -```js -const { isPlainObject } = require('is-plain-object'); -``` - -**true** when created by the `Object` constructor, or Object.create(null). - -```js -isPlainObject(Object.create({})); -//=> true -isPlainObject(Object.create(Object.prototype)); -//=> true -isPlainObject({foo: 'bar'}); -//=> true -isPlainObject({}); -//=> true -isPlainObject(null); -//=> true -``` - -**false** when not created by the `Object` constructor. - -```js -isPlainObject(1); -//=> false -isPlainObject(['foo', 'bar']); -//=> false -isPlainObject([]); -//=> false -isPlainObject(new Foo); -//=> false -isPlainObject(Object.create(null)); -//=> false -``` - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.") -* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") -* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 19 | [jonschlinkert](https://github.com/jonschlinkert) | -| 6 | [TrySound](https://github.com/TrySound) | -| 6 | [stevenvachon](https://github.com/stevenvachon) | -| 3 | [onokumus](https://github.com/onokumus) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._ diff --git a/node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js b/node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js deleted file mode 100644 index d134e4f2..00000000 --- a/node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; diff --git a/node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs b/node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs deleted file mode 100644 index c2d9f351..00000000 --- a/node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs +++ /dev/null @@ -1,34 +0,0 @@ -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
- */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -export { isPlainObject }; diff --git a/node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts b/node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts deleted file mode 100644 index a359940d..00000000 --- a/node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts +++ /dev/null @@ -1 +0,0 @@ -export function isPlainObject(o: any): boolean; diff --git a/node_modules/@octokit/request/node_modules/is-plain-object/package.json b/node_modules/@octokit/request/node_modules/is-plain-object/package.json deleted file mode 100644 index 3ea169a7..00000000 --- a/node_modules/@octokit/request/node_modules/is-plain-object/package.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "name": "is-plain-object", - "description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).", - "version": "5.0.0", - "homepage": "https://github.com/jonschlinkert/is-plain-object", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Osman Nuri Okumuş (http://onokumus.com)", - "Steven Vachon (https://svachon.com)", - "(https://github.com/wtgtybhertgeghgtwtg)", - "Bogdan Chadkin (https://github.com/TrySound)" - ], - "repository": "jonschlinkert/is-plain-object", - "bugs": { - "url": "https://github.com/jonschlinkert/is-plain-object/issues" - }, - "license": "MIT", - "main": "dist/is-plain-object.js", - "module": "dist/is-plain-object.mjs", - "types": "is-plain-object.d.ts", - "files": [ - "is-plain-object.d.ts", - "dist" - ], - "exports": { - ".": { - "import": "./dist/is-plain-object.mjs", - "require": "./dist/is-plain-object.js" - }, - "./package.json": "./package.json" - }, - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "build": "rollup -c", - "test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html", - "test_node": "mocha -r esm", - "test": "npm run test_node && npm run build && npm run test_browser", - "prepare": "rollup -c" - }, - "devDependencies": { - "chai": "^4.2.0", - "esm": "^3.2.22", - "gulp-format-md": "^1.0.0", - "mocha": "^6.1.4", - "mocha-headless-chrome": "^3.1.0", - "rollup": "^2.22.1" - }, - "keywords": [ - "check", - "is", - "is-object", - "isobject", - "javascript", - "kind", - "kind-of", - "object", - "plain", - "type", - "typeof", - "value" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "related": { - "list": [ - "is-number", - "isobject", - "kind-of" - ] - }, - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/@octokit/request/package.json b/node_modules/@octokit/request/package.json index 970cf377..254d9501 100644 --- a/node_modules/@octokit/request/package.json +++ b/node_modules/@octokit/request/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/request", "description": "Send parameterized requests 
to GitHub’s APIs with sensible defaults in browsers and Node", - "version": "5.4.15", + "version": "5.6.0", "license": "MIT", "files": [ "dist-*/", @@ -18,8 +18,8 @@ "repository": "github:octokit/request.js", "dependencies": { "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.0.0", - "@octokit/types": "^6.7.1", + "@octokit/request-error": "^2.1.0", + "@octokit/types": "^6.16.1", "is-plain-object": "^5.0.0", "node-fetch": "^2.6.1", "universal-user-agent": "^6.0.0" @@ -37,12 +37,12 @@ "@types/node-fetch": "^2.3.3", "@types/once": "^1.4.0", "fetch-mock": "^9.3.1", - "jest": "^26.0.1", + "jest": "^27.0.0", "lolex": "^6.0.0", - "prettier": "^2.0.1", + "prettier": "2.3.1", "semantic-release": "^17.0.0", "semantic-release-plugin-update-version-in-files": "^1.0.0", - "ts-jest": "^26.1.0", + "ts-jest": "^27.0.0", "typescript": "^4.0.2" }, "publishConfig": { diff --git a/node_modules/@octokit/types/README.md b/node_modules/@octokit/types/README.md index 3c34447c..c48ce424 100644 --- a/node_modules/@octokit/types/README.md +++ b/node_modules/@octokit/types/README.md @@ -27,7 +27,8 @@ See all exported types at https://octokit.github.io/types.ts ```ts import { Endpoints } from "@octokit/types"; -type listUserReposParameters = Endpoints["GET /repos/{owner}/{repo}"]["parameters"]; +type listUserReposParameters = + Endpoints["GET /repos/{owner}/{repo}"]["parameters"]; type listUserReposResponse = Endpoints["GET /repos/{owner}/{repo}"]["response"]; async function listRepos( diff --git a/node_modules/@octokit/types/dist-node/index.js b/node_modules/@octokit/types/dist-node/index.js index 3071baf8..cda9bf91 100644 --- a/node_modules/@octokit/types/dist-node/index.js +++ b/node_modules/@octokit/types/dist-node/index.js @@ -2,7 +2,7 @@ Object.defineProperty(exports, '__esModule', { value: true }); -const VERSION = "6.14.2"; +const VERSION = "6.16.4"; exports.VERSION = VERSION; //# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/types/dist-src/VERSION.js b/node_modules/@octokit/types/dist-src/VERSION.js index e874e01e..c5f6fe49 100644 --- a/node_modules/@octokit/types/dist-src/VERSION.js +++ b/node_modules/@octokit/types/dist-src/VERSION.js @@ -1 +1 @@ -export const VERSION = "6.14.2"; +export const VERSION = "6.16.4"; diff --git a/node_modules/@octokit/types/dist-types/VERSION.d.ts b/node_modules/@octokit/types/dist-types/VERSION.d.ts index 94d7cb28..aedf3f22 100644 --- a/node_modules/@octokit/types/dist-types/VERSION.d.ts +++ b/node_modules/@octokit/types/dist-types/VERSION.d.ts @@ -1 +1 @@ -export declare const VERSION = "6.14.2"; +export declare const VERSION = "6.16.4"; diff --git a/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts b/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts index 34e8b5d2..df80b47d 100644 --- a/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts +++ b/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts @@ -1466,6 +1466,10 @@ export interface Endpoints { * @see https://docs.github.com/rest/reference/repos#get-community-profile-metrics */ "GET /repos/{owner}/{repo}/community/profile": Operation<"/repos/{owner}/{repo}/community/profile", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{basehead}": Operation<"/repos/{owner}/{repo}/compare/{basehead}", "get">; /** * @see https://docs.github.com/rest/reference/repos#compare-two-commits */ @@ -2582,6 +2586,10 @@ export interface Endpoints { * @see 
https://docs.github.com/rest/reference/actions#create-a-remove-token-for-a-repository */ "POST /repos/{owner}/{repo}/actions/runners/remove-token": Operation<"/repos/{owner}/{repo}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#approve-a-workflow-run-for-a-fork-pull-request + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approve", "post">; /** * @see https://docs.github.com/rest/reference/actions#cancel-a-workflow-run */ @@ -2650,6 +2658,10 @@ export interface Endpoints { * @see https://docs.github.com/rest/reference/repos#create-a-commit-comment */ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#create-a-content-attachment + */ + "POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments": Operation<"/repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", "post", "corsair">; /** * @see https://docs.github.com/rest/reference/repos#create-a-deployment */ @@ -2782,6 +2794,10 @@ export interface Endpoints { * @see https://docs.github.com/rest/reference/repos#create-a-release */ "POST /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-release + */ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "post", "squirrel-girl">; /** * @see https://docs.github.com/rest/reference/repos#create-a-commit-status */ diff --git a/node_modules/@octokit/types/dist-web/index.js b/node_modules/@octokit/types/dist-web/index.js index a864c663..323f0735 100644 --- a/node_modules/@octokit/types/dist-web/index.js +++ b/node_modules/@octokit/types/dist-web/index.js @@ -1,4 +1,4 @@ -const VERSION = "6.14.2"; +const VERSION = "6.16.4"; export { VERSION }; //# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/types/package.json b/node_modules/@octokit/types/package.json index b033eccf..3d0d5d52 100644 --- a/node_modules/@octokit/types/package.json +++ b/node_modules/@octokit/types/package.json @@ -1,7 +1,7 @@ { "name": "@octokit/types", "description": "Shared TypeScript definitions for Octokit projects", - "version": "6.14.2", + "version": "6.16.4", "license": "MIT", "files": [ "dist-*/", @@ -18,7 +18,7 @@ ], "repository": "github:octokit/types.ts", "dependencies": { - "@octokit/openapi-types": "^7.0.0" + "@octokit/openapi-types": "^7.3.2" }, "devDependencies": { "@octokit/graphql": "^4.2.2", @@ -47,7 +47,7 @@ "source": "dist-src/index.js", "types": "dist-types/index.d.ts", "octokit": { - "openapi-version": "2.17.4" + "openapi-version": "2.22.1" }, "main": "dist-node/index.js", "module": "dist-web/index.js" diff --git a/node_modules/@technote-space/anchor-markdown-header/package.json b/node_modules/@technote-space/anchor-markdown-header/package.json index 6064ceff..a0841eb6 100644 --- a/node_modules/@technote-space/anchor-markdown-header/package.json +++ b/node_modules/@technote-space/anchor-markdown-header/package.json @@ -1,6 +1,6 @@ { "name": "@technote-space/anchor-markdown-header", - "version": "1.1.18", + "version": "1.1.21", "description": "Generates an anchor for a markdown header.", "keywords": [ "markdown", @@ -44,19 +44,19 @@ "emoji-regex": "^9.2.2" }, "devDependencies": { - 
"@commitlint/cli": "^12.1.1", - "@commitlint/config-conventional": "^12.1.1", + "@commitlint/cli": "^12.1.4", + "@commitlint/config-conventional": "^12.1.4", "@types/jest": "^26.0.23", - "@types/node": "^15.0.1", - "@typescript-eslint/eslint-plugin": "^4.22.1", - "@typescript-eslint/parser": "^4.22.1", - "eslint": "^7.25.0", + "@types/node": "^15.12.4", + "@typescript-eslint/eslint-plugin": "^4.28.0", + "@typescript-eslint/parser": "^4.28.0", + "eslint": "^7.29.0", "husky": "^6.0.0", - "jest": "^26.6.3", - "jest-circus": "^26.6.3", - "lint-staged": "^10.5.4", - "ts-jest": "^26.5.5", - "typescript": "^4.2.4" + "jest": "^27.0.4", + "jest-circus": "^27.0.4", + "lint-staged": "^11.0.0", + "ts-jest": "^27.0.3", + "typescript": "^4.3.4" }, "publishConfig": { "access": "public" diff --git a/node_modules/@technote-space/doctoc/package.json b/node_modules/@technote-space/doctoc/package.json index ba314218..d7d95042 100644 --- a/node_modules/@technote-space/doctoc/package.json +++ b/node_modules/@technote-space/doctoc/package.json @@ -1,6 +1,6 @@ { "name": "@technote-space/doctoc", - "version": "2.4.5", + "version": "2.4.7", "description": "Generates TOC for markdown files of local git repo.", "keywords": [ "github", @@ -43,26 +43,26 @@ "update": "npx npm-check-updates -u && yarn install && yarn upgrade && yarn audit" }, "dependencies": { - "@technote-space/anchor-markdown-header": "^1.1.17", - "@textlint/markdown-to-ast": "^6.3.4", + "@technote-space/anchor-markdown-header": "^1.1.21", + "@textlint/markdown-to-ast": "^12.0.0", "htmlparser2": "^6.1.0", "update-section": "^0.3.3" }, "devDependencies": { - "@commitlint/cli": "^12.1.1", - "@commitlint/config-conventional": "^12.1.1", - "@textlint/ast-node-types": "^4.4.2", + "@commitlint/cli": "^12.1.4", + "@commitlint/config-conventional": "^12.1.4", + "@textlint/ast-node-types": "^12.0.0", "@types/jest": "^26.0.23", - "@types/node": "^15.0.1", - "@typescript-eslint/eslint-plugin": "^4.22.0", - "@typescript-eslint/parser": "^4.22.0", - "eslint": "^7.25.0", + "@types/node": "^15.12.4", + "@typescript-eslint/eslint-plugin": "^4.28.0", + "@typescript-eslint/parser": "^4.28.0", + "eslint": "^7.29.0", "husky": "^6.0.0", - "jest": "^26.6.3", - "jest-circus": "^26.6.3", - "lint-staged": "^10.5.4", - "ts-jest": "^26.5.5", - "typescript": "^4.2.4" + "jest": "^27.0.5", + "jest-circus": "^27.0.5", + "lint-staged": "^11.0.0", + "ts-jest": "^27.0.3", + "typescript": "^4.3.4" }, "publishConfig": { "access": "public" diff --git a/node_modules/@technote-space/filter-github-action/package.json b/node_modules/@technote-space/filter-github-action/package.json index f895537b..dfb3c811 100644 --- a/node_modules/@technote-space/filter-github-action/package.json +++ b/node_modules/@technote-space/filter-github-action/package.json @@ -1,6 +1,6 @@ { "name": "@technote-space/filter-github-action", - "version": "0.5.24", + "version": "0.5.28", "description": "Helper to filter GitHub Action.", "keywords": [ "github", @@ -34,20 +34,20 @@ "update": "npm_config_yes=true npx npm-check-updates -u --timeout 100000 && yarn install && yarn upgrade && yarn audit" }, "dependencies": { - "@actions/core": "^1.2.7", - "@actions/github": "^4.0.0" + "@actions/core": "^1.4.0", + "@actions/github": "^5.0.0" }, "devDependencies": { - "@technote-space/github-action-test-helper": "^0.7.6", + "@technote-space/github-action-test-helper": "^0.7.12", "@types/jest": "^26.0.23", - "@types/node": "^15.0.1", - "@typescript-eslint/eslint-plugin": "^4.22.0", - "@typescript-eslint/parser": "^4.22.0", - 
"eslint": "^7.25.0", - "jest": "^26.6.3", - "jest-circus": "^26.6.3", - "ts-jest": "^26.5.5", - "typescript": "^4.2.4" + "@types/node": "^15.12.2", + "@typescript-eslint/eslint-plugin": "^4.27.0", + "@typescript-eslint/parser": "^4.27.0", + "eslint": "^7.28.0", + "jest": "^27.0.4", + "jest-circus": "^27.0.4", + "ts-jest": "^27.0.3", + "typescript": "^4.3.4" }, "publishConfig": { "access": "public" diff --git a/node_modules/@technote-space/github-action-helper/dist/api-helper.js b/node_modules/@technote-space/github-action-helper/dist/api-helper.js index 361ba1fa..6224552b 100644 --- a/node_modules/@technote-space/github-action-helper/dist/api-helper.js +++ b/node_modules/@technote-space/github-action-helper/dist/api-helper.js @@ -62,7 +62,7 @@ class ApiHelper { * @return {Promise<{ path: string, sha: string }>} blob */ this.createBlob = async (rootDir, filepath) => { - const blob = await this.octokit.git.createBlob({ + const blob = await this.octokit.rest.git.createBlob({ ...this.context.repo, content: Buffer.from(fs_1.default.readFileSync(path_1.default.resolve(rootDir, filepath), 'utf8')).toString('base64'), encoding: 'base64', @@ -79,7 +79,7 @@ class ApiHelper { /** * @return {Promise} commit */ - this.getCommit = async () => this.getResponseData(this.octokit.git.getCommit({ + this.getCommit = async () => this.getResponseData(this.octokit.rest.git.getCommit({ ...this.context.repo, 'commit_sha': this.getCommitSha(), })); @@ -89,7 +89,7 @@ class ApiHelper { this.getPR = async () => { const key = parseInt(this.context.payload.number, 10); if (!(key in this.prCache)) { - this.prCache[key] = await this.getResponseData(this.octokit.pulls.get({ + this.prCache[key] = await this.getResponseData(this.octokit.rest.pulls.get({ ...this.context.repo, 'pull_number': this.context.payload.number, })); @@ -106,7 +106,7 @@ class ApiHelper { * @param {Array<{ path: string, sha: string }>} blobs blobs * @return {Promise} tree */ - this.createTree = async (blobs) => this.getResponseData(this.octokit.git.createTree({ + this.createTree = async (blobs) => this.getResponseData(this.octokit.rest.git.createTree({ ...this.context.repo, 'base_tree': utils_1.ensureNotNull(utils_1.objectGet((await this.getCommit()), 'tree.sha')), tree: blobs.map(blob => ({ @@ -121,7 +121,7 @@ class ApiHelper { * @param {GitCreateTreeResponseData} tree tree * @return {Promise} commit */ - this.createCommit = async (commitMessage, tree) => this.getResponseData(this.octokit.git.createCommit({ + this.createCommit = async (commitMessage, tree) => this.getResponseData(this.octokit.rest.git.createCommit({ ...this.context.repo, tree: tree.sha, parents: [this.getCommitSha()], @@ -133,7 +133,7 @@ class ApiHelper { */ this.getRef = async (refName) => { try { - return await this.getResponseData(this.octokit.git.getRef({ + return await this.getResponseData(this.octokit.rest.git.getRef({ ...this.context.repo, ref: refName, })); @@ -150,7 +150,7 @@ class ApiHelper { */ this.updateRef = async (commit, refName, force) => { try { - await this.octokit.git.updateRef({ + await this.octokit.rest.git.updateRef({ ...this.context.repo, ref: refName, sha: utils_1.ensureNotNull(commit.sha), @@ -174,7 +174,7 @@ class ApiHelper { * @return {Promise} void */ this.createRef = async (commit, refName) => { - await this.octokit.git.createRef({ + await this.octokit.rest.git.createRef({ ...this.context.repo, ref: refName, sha: utils_1.ensureNotNull(commit.sha), @@ -185,7 +185,7 @@ class ApiHelper { * @return {Promise} void */ this.deleteRef = async (refName) => { - 
await this.octokit.git.deleteRef({ + await this.octokit.rest.git.deleteRef({ ...this.context.repo, ref: refName, }); @@ -195,7 +195,7 @@ class ApiHelper { * @return {Promise} pull request */ this.findPullRequest = async (branchName) => { - const response = await this.octokit.pulls.list({ + const response = await this.octokit.rest.pulls.list({ ...this.context.repo, head: `${this.context.repo.owner}:${utils_1.getBranch(branchName, false)}`, }); @@ -208,7 +208,7 @@ class ApiHelper { * @param {PullsListParams} params params * @return {AsyncIterable>} pull request list */ - this.pullsList = (params) => this.octokit.paginate(this.octokit.pulls.list, Object.assign({ + this.pullsList = (params) => this.octokit.paginate(this.octokit.rest.pulls.list, Object.assign({ sort: 'created', direction: 'asc', }, params, { @@ -219,7 +219,7 @@ class ApiHelper { * @param {PullsCreateParams} detail detail * @return {Promise} pull */ - this.pullsCreate = async (branchName, detail) => this.getResponseData(this.octokit.pulls.create({ + this.pullsCreate = async (branchName, detail) => this.getResponseData(this.octokit.rest.pulls.create({ ...this.context.repo, head: `${this.context.repo.owner}:${utils_1.getBranch(branchName, false)}`, base: (await this.getRefForUpdate(false)).replace(/^heads\//, ''), @@ -230,7 +230,7 @@ class ApiHelper { * @param {PullsUpdateParams} detail detail * @return {Promise} pull */ - this.pullsUpdate = async (number, detail) => this.getResponseData(this.octokit.pulls.update({ + this.pullsUpdate = async (number, detail) => this.getResponseData(this.octokit.rest.pulls.update({ ...this.context.repo, 'pull_number': number, state: 'open', @@ -300,7 +300,7 @@ class ApiHelper { if (!pullRequest) { return false; } - await this.octokit.issues.createComment({ + await this.octokit.rest.issues.createComment({ ...this.context.repo, 'issue_number': pullRequest.number, body, @@ -418,7 +418,7 @@ class ApiHelper { if (false === sender) { throw new Error('Sender is not valid.'); } - const { data: user } = await this.octokit.users.getByUsername({ + const { data: user } = await this.octokit.rest.users.getByUsername({ username: sender, }); return { @@ -433,14 +433,14 @@ class ApiHelper { */ this.getDefaultBranch = async () => { var _a, _b; - return (_b = (_a = this.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch) !== null && _b !== void 0 ? _b : (await this.octokit.repos.get({ + return (_b = (_a = this.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch) !== null && _b !== void 0 ? 
_b : (await this.octokit.rest.repos.get({ ...this.context.repo, })).data.default_branch; }; /** * @return {Promise>} tags */ - this.getTags = async () => (await this.octokit.paginate(this.octokit.git.listMatchingRefs, { + this.getTags = async () => (await this.octokit.paginate(this.octokit.rest.git.listMatchingRefs, { ...this.context.repo, ref: 'tags/', })).map((item) => utils_1.trimRef(item.ref)); diff --git a/node_modules/@technote-space/github-action-helper/dist/types.d.ts b/node_modules/@technote-space/github-action-helper/dist/types.d.ts index c7e94d49..4d8aa418 100644 --- a/node_modules/@technote-space/github-action-helper/dist/types.d.ts +++ b/node_modules/@technote-space/github-action-helper/dist/types.d.ts @@ -1,3 +1,5 @@ import { GitHub } from '@actions/github/lib/utils'; import { RestEndpointMethods } from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'; -export declare type Octokit = InstanceType & RestEndpointMethods; +export declare type Octokit = InstanceType & { + rest: RestEndpointMethods; +}; diff --git a/node_modules/@technote-space/github-action-helper/package.json b/node_modules/@technote-space/github-action-helper/package.json index 71443768..cfae9e6e 100644 --- a/node_modules/@technote-space/github-action-helper/package.json +++ b/node_modules/@technote-space/github-action-helper/package.json @@ -1,6 +1,6 @@ { "name": "@technote-space/github-action-helper", - "version": "5.2.7", + "version": "5.2.9", "description": "Helper for GitHub Action.", "keywords": [ "github", @@ -38,28 +38,28 @@ }, "dependencies": { "@actions/core": "^1.2.7", - "@actions/github": "^4.0.0", + "@actions/github": "^5.0.0", "@octokit/openapi-types": "^7.0.0", "@octokit/plugin-rest-endpoint-methods": "^5.1.1", - "@technote-space/github-action-log-helper": "^0.1.24", + "@technote-space/github-action-log-helper": "^0.1.25", "shell-escape": "^0.2.0", "sprintf-js": "^1.1.2" }, "devDependencies": { - "@commitlint/cli": "^12.1.1", - "@commitlint/config-conventional": "^12.1.1", - "@technote-space/github-action-test-helper": "^0.7.9", + "@commitlint/cli": "^12.1.4", + "@commitlint/config-conventional": "^12.1.4", + "@technote-space/github-action-test-helper": "^0.7.12", "@types/jest": "^26.0.23", - "@types/node": "^15.0.1", - "@typescript-eslint/eslint-plugin": "^4.22.0", - "@typescript-eslint/parser": "^4.22.0", - "eslint": "^7.25.0", + "@types/node": "^15.3.0", + "@typescript-eslint/eslint-plugin": "^4.23.0", + "@typescript-eslint/parser": "^4.23.0", + "eslint": "^7.26.0", "husky": "^6.0.0", "jest": "^26.6.3", "jest-circus": "^26.6.3", - "lint-staged": "^10.5.4", + "lint-staged": "^11.0.0", "nock": "^13.0.11", - "ts-jest": "^26.5.5", + "ts-jest": "^26.5.6", "typescript": "^4.2.4" }, "publishConfig": { diff --git a/node_modules/@technote-space/github-action-log-helper/package.json b/node_modules/@technote-space/github-action-log-helper/package.json index d42a3e50..39aa1fce 100644 --- a/node_modules/@technote-space/github-action-log-helper/package.json +++ b/node_modules/@technote-space/github-action-log-helper/package.json @@ -1,6 +1,6 @@ { "name": "@technote-space/github-action-log-helper", - "version": "0.1.24", + "version": "0.1.28", "description": "Logging helpers for GitHub Actions.", "keywords": [ "github", @@ -37,25 +37,25 @@ "postpublish": "[ -n \"$CI\" ] || [ ! 
-f node_modules/.bin/pinst ] || pinst --enable" }, "dependencies": { - "@actions/core": "^1.2.7", + "@actions/core": "^1.4.0", "sprintf-js": "^1.1.2" }, "devDependencies": { - "@commitlint/cli": "^12.1.1", - "@commitlint/config-conventional": "^12.1.1", - "@technote-space/github-action-test-helper": "^0.7.8", + "@commitlint/cli": "^12.1.4", + "@commitlint/config-conventional": "^12.1.4", + "@technote-space/github-action-test-helper": "^0.7.12", "@types/jest": "^26.0.23", - "@types/node": "^15.0.1", - "@typescript-eslint/eslint-plugin": "^4.22.0", - "@typescript-eslint/parser": "^4.22.0", - "eslint": "^7.25.0", + "@types/node": "^15.12.2", + "@typescript-eslint/eslint-plugin": "^4.27.0", + "@typescript-eslint/parser": "^4.27.0", + "eslint": "^7.28.0", "husky": "^6.0.0", - "jest": "^26.6.3", - "jest-circus": "^26.6.3", - "lint-staged": "^10.5.4", + "jest": "^27.0.4", + "jest-circus": "^27.0.4", + "lint-staged": "^11.0.0", "pinst": "^2.1.6", - "ts-jest": "^26.5.5", - "typescript": "^4.2.4" + "ts-jest": "^27.0.3", + "typescript": "^4.3.3" }, "publishConfig": { "access": "public" diff --git a/node_modules/@technote-space/github-action-pr-helper/dist/utils/command.js b/node_modules/@technote-space/github-action-pr-helper/dist/utils/command.js index e70ca34f..9ab516c6 100644 --- a/node_modules/@technote-space/github-action-pr-helper/dist/utils/command.js +++ b/node_modules/@technote-space/github-action-pr-helper/dist/utils/command.js @@ -185,7 +185,7 @@ const isMergeable = (number, octokit, context) => __awaiter(void 0, void 0, void repo: context.actionContext.repo.repo, 'pull_number': number, }), () => __awaiter(void 0, void 0, void 0, function* () { - return ensureNotNullValue((yield octokit.pulls.get({ + return ensureNotNullValue((yield octokit.rest.pulls.get({ owner: context.actionContext.repo.owner, repo: context.actionContext.repo.repo, 'pull_number': number, @@ -198,18 +198,18 @@ const afterCreatePr = (branchName, number, helper, logger, octokit, context) => if ((_a = context.actionDetail.labels) === null || _a === void 0 ? void 0 : _a.length) { logger.info('Adding labels...'); console.log(context.actionDetail.labels); - yield octokit.issues.addLabels(Object.assign(Object.assign({}, context.actionContext.repo), { 'issue_number': number, labels: context.actionDetail.labels })); + yield octokit.rest.issues.addLabels(Object.assign(Object.assign({}, context.actionContext.repo), { 'issue_number': number, labels: context.actionDetail.labels })); } if ((_b = context.actionDetail.assignees) === null || _b === void 0 ? void 0 : _b.length) { logger.info('Adding assignees...'); console.log(context.actionDetail.assignees); - yield octokit.issues.addAssignees(Object.assign(Object.assign({}, context.actionContext.repo), { 'issue_number': number, assignees: context.actionDetail.assignees })); + yield octokit.rest.issues.addAssignees(Object.assign(Object.assign({}, context.actionContext.repo), { 'issue_number': number, assignees: context.actionDetail.assignees })); } if (((_c = context.actionDetail.reviewers) === null || _c === void 0 ? void 0 : _c.length) || ((_d = context.actionDetail.teamReviewers) === null || _d === void 0 ? 
void 0 : _d.length)) { logger.info('Adding reviewers...'); console.log(context.actionDetail.reviewers); console.log(context.actionDetail.teamReviewers); - yield octokit.pulls.requestReviewers(Object.assign(Object.assign({}, context.actionContext.repo), { 'pull_number': number, reviewers: context.actionDetail.reviewers, 'team_reviewers': context.actionDetail.teamReviewers })); + yield octokit.rest.pulls.requestReviewers(Object.assign(Object.assign({}, context.actionContext.repo), { 'pull_number': number, reviewers: context.actionDetail.reviewers, 'team_reviewers': context.actionDetail.teamReviewers })); } if (misc_1.isActiveTriggerWorkflow(context)) { // add empty commit to trigger pr event diff --git a/node_modules/@technote-space/github-action-pr-helper/dist/utils/misc.js b/node_modules/@technote-space/github-action-pr-helper/dist/utils/misc.js index e72590aa..caddf3c5 100644 --- a/node_modules/@technote-space/github-action-pr-helper/dist/utils/misc.js +++ b/node_modules/@technote-space/github-action-pr-helper/dist/utils/misc.js @@ -219,12 +219,12 @@ const checkSuiteState = (checkSuiteId) => (suite) => { }; exports.checkSuiteState = checkSuiteState; const isPassedAllChecks = (octokit, context) => __awaiter(void 0, void 0, void 0, function* () { - const { data: status } = yield octokit.repos.getCombinedStatusForRef(Object.assign(Object.assign({}, context.actionContext.repo), { ref: context.actionContext.sha })); + const { data: status } = yield octokit.rest.repos.getCombinedStatusForRef(Object.assign(Object.assign({}, context.actionContext.repo), { ref: context.actionContext.sha })); if ('success' !== status.state) { return false; } - const checkSuiteUrl = (yield octokit.actions.getWorkflowRun(Object.assign(Object.assign({}, context.actionContext.repo), { 'run_id': Number(process.env.GITHUB_RUN_ID) }))).data['check_suite_url']; + const checkSuiteUrl = (yield octokit.rest.actions.getWorkflowRun(Object.assign(Object.assign({}, context.actionContext.repo), { 'run_id': Number(process.env.GITHUB_RUN_ID) }))).data['check_suite_url']; const checkSuiteId = Number(checkSuiteUrl.replace(/^.+\/(\d+)$/, '$1')); - return !(yield octokit.paginate(octokit.checks.listSuitesForRef.endpoint.merge(Object.assign(Object.assign({}, context.actionContext.repo), { ref: context.actionContext.sha })))).filter(suite => exports.checkSuiteState(checkSuiteId)(suite)).length; + return !(yield octokit.paginate(octokit.rest.checks.listSuitesForRef.endpoint.merge(Object.assign(Object.assign({}, context.actionContext.repo), { ref: context.actionContext.sha })))).filter(suite => exports.checkSuiteState(checkSuiteId)(suite)).length; }); exports.isPassedAllChecks = isPassedAllChecks; diff --git a/node_modules/@technote-space/github-action-pr-helper/dist/utils/process.js b/node_modules/@technote-space/github-action-pr-helper/dist/utils/process.js index 3e615653..d7ae4ed5 100644 --- a/node_modules/@technote-space/github-action-pr-helper/dist/utils/process.js +++ b/node_modules/@technote-space/github-action-pr-helper/dist/utils/process.js @@ -104,7 +104,7 @@ const autoMerge = (pr, logger, octokit, context) => __awaiter(void 0, void 0, vo logger.info('All checks are passed.'); logger.startProcess('Auto merging...'); try { - yield octokit.pulls.merge(Object.assign(Object.assign({}, context.actionContext.repo), { 'pull_number': pr.number })); + yield octokit.rest.pulls.merge(Object.assign(Object.assign({}, context.actionContext.repo), { 'pull_number': pr.number })); } catch (error) { logger.warn(error.message); diff --git 
a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/README.md b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/README.md deleted file mode 100644 index 47acb127..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# openapi-types.ts - -> Generated TypeScript definitions based on GitHub's OpenAPI spec - -This repository continously converts [GitHub's OpenAPI specification](https://github.com/github/rest-api-description/) into TypeScript definitions and publishes them to npm as `@octokit/openapi-types` - -## License - -[MIT](LICENSE) diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js deleted file mode 100644 index bc729794..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js +++ /dev/null @@ -1,8 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -const VERSION = "6.2.1"; - -exports.VERSION = VERSION; -//# sourceMappingURL=index.js.map diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js.map b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js.map deleted file mode 100644 index 157cdb86..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-node/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js"],"sourcesContent":["export const VERSION = \"6.2.1\";\n"],"names":["VERSION"],"mappings":";;;;MAAaA,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/generated/types.js b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/generated/types.js deleted file mode 100644 index 6738d94e..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/generated/types.js +++ /dev/null @@ -1,5 +0,0 @@ -/** - * This file was auto-generated by openapi-typescript. - * Do not make direct changes to the file. 
- */ -export {}; diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/index.js b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/index.js deleted file mode 100644 index 5d080663..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/index.js +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./generated/types"; -export * from "./version"; diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/version.js b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/version.js deleted file mode 100644 index 39b7af19..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-src/version.js +++ /dev/null @@ -1 +0,0 @@ -export const VERSION = "6.2.1"; diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts deleted file mode 100644 index 2979a2f5..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/generated/types.d.ts +++ /dev/null @@ -1,31583 +0,0 @@ -/** - * This file was auto-generated by openapi-typescript. - * Do not make direct changes to the file. - */ -export interface paths { - "/": { - /** Get Hypermedia links to resources accessible in GitHub's REST API */ - get: operations["meta/root"]; - }; - "/app": { - /** - * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - get: operations["apps/get-authenticated"]; - }; - "/app-manifests/{code}/conversions": { - /** Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. */ - post: operations["apps/create-from-manifest"]; - }; - "/app/hook/config": { - /** - * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - get: operations["apps/get-webhook-config-for-app"]; - /** - * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." 
- * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - patch: operations["apps/update-webhook-config-for-app"]; - }; - "/app/installations": { - /** - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - * - * The permissions the installation has are included under the `permissions` key. - */ - get: operations["apps/list-installations"]; - }; - "/app/installations/{installation_id}": { - /** - * Enables an authenticated GitHub App to find an installation's information using the installation id. The installation's account type (`target_type`) will be either an organization or a user account, depending which account the repository belongs to. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - get: operations["apps/get-installation"]; - /** - * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - delete: operations["apps/delete-installation"]; - }; - "/app/installations/{installation_id}/access_tokens": { - /** - * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - post: operations["apps/create-installation-access-token"]; - }; - "/app/installations/{installation_id}/suspended": { - /** - * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - put: operations["apps/suspend-installation"]; - /** - * Removes a GitHub App installation suspension. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
- */ - delete: operations["apps/unsuspend-installation"]; - }; - "/applications/grants": { - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * You can use this API to list the set of OAuth applications that have been granted access to your account. Unlike the [list your authorizations](https://docs.github.com/rest/reference/oauth-authorizations#list-your-authorizations) API, this API does not manage individual tokens. This API will return one entry for each OAuth application that has been granted access to your account, regardless of the number of tokens an application has generated for your user. The list of OAuth applications returned matches what is shown on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). The `scopes` returned are the union of scopes authorized for the application. For example, if an application has one token with `repo` scope and another token with `user` scope, the grant will return `["repo", "user"]`. - */ - get: operations["oauth-authorizations/list-grants"]; - }; - "/applications/grants/{grant_id}": { - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ - get: operations["oauth-authorizations/get-grant"]; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for your user. Once deleted, the application has no access to your account and is no longer listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). 
- */ - delete: operations["oauth-authorizations/delete-grant"]; - }; - "/applications/{client_id}/grant": { - /** - * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. - * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). - */ - delete: operations["apps/delete-authorization"]; - }; - "/applications/{client_id}/grants/{access_token}": { - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). - * - * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid token as `:access_token` and the grant for the token's owner will be deleted. - * - * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the Applications settings page under "Authorized OAuth Apps" on GitHub](https://github.com/settings/applications#authorized). - */ - delete: operations["apps/revoke-grant-for-application"]; - }; - "/applications/{client_id}/token": { - /** OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. */ - post: operations["apps/check-token"]; - /** OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. */ - delete: operations["apps/delete-token"]; - /** OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. 
You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ - patch: operations["apps/reset-token"]; - }; - "/applications/{client_id}/token/scoped": { - /** Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ - post: operations["apps/scope-token"]; - }; - "/applications/{client_id}/tokens/{access_token}": { - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). - * - * OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. - */ - get: operations["apps/check-authorization"]; - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). - * - * OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. - */ - post: operations["apps/reset-authorization"]; - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). - * - * OAuth application owners can revoke a single token for an OAuth application. 
You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. - */ - delete: operations["apps/revoke-authorization-for-application"]; - }; - "/apps/{app_slug}": { - /** - * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). - * - * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - get: operations["apps/get-by-slug"]; - }; - "/authorizations": { - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ - get: operations["oauth-authorizations/list-authorizations"]; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). - * - * Creates OAuth tokens using [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication). If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." 
- * - * To create tokens for a particular OAuth application using this endpoint, you must authenticate as the user you want to create an authorization for and provide the app's client ID and secret, found on your OAuth application's settings page. If your OAuth application intends to create multiple tokens for one user, use `fingerprint` to differentiate between them. - * - * You can also create tokens on GitHub from the [personal access tokens settings](https://github.com/settings/tokens) page. Read more about these tokens in [the GitHub Help documentation](https://help.github.com/articles/creating-an-access-token-for-command-line-use). - * - * Organizations that enforce SAML SSO require personal access tokens to be allowed. Read more about allowing tokens in [the GitHub Help documentation](https://help.github.com/articles/about-identity-and-access-management-with-saml-single-sign-on). - */ - post: operations["oauth-authorizations/create-authorization"]; - }; - "/authorizations/clients/{client_id}": { - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). - * - * Creates a new authorization for the specified OAuth application, only if an authorization for that application doesn't already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. - * - * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." - * - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. 
For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - */ - put: operations["oauth-authorizations/get-or-create-authorization-for-app"]; - }; - "/authorizations/clients/{client_id}/{fingerprint}": { - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). - * - * This method will create a new authorization for the specified OAuth application, only if an authorization for that application and fingerprint do not already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. `fingerprint` is a unique string to distinguish an authorization from others created for the same client ID and user. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. - * - * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." - */ - put: operations["oauth-authorizations/get-or-create-authorization-for-app-and-fingerprint"]; - }; - "/authorizations/{authorization_id}": { - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). 
*/ - get: operations["oauth-authorizations/get-authorization"]; - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ - delete: operations["oauth-authorizations/delete-authorization"]; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." - * - * You can only send one of these scope keys at a time. - */ - patch: operations["oauth-authorizations/update-authorization"]; - }; - "/codes_of_conduct": { - get: operations["codes-of-conduct/get-all-codes-of-conduct"]; - }; - "/codes_of_conduct/{key}": { - get: operations["codes-of-conduct/get-conduct-code"]; - }; - "/content_references/{content_reference_id}/attachments": { - /** - * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. - * - * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - post: operations["apps/create-content-attachment"]; - }; - "/emojis": { - /** Lists all the emojis available to use on GitHub. */ - get: operations["emojis/get"]; - }; - "/enterprises/{enterprise}/actions/permissions": { - /** - * Gets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. 
- */ - get: operations["enterprise-admin/get-github-actions-permissions-enterprise"]; - /** - * Sets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - put: operations["enterprise-admin/set-github-actions-permissions-enterprise"]; - }; - "/enterprises/{enterprise}/actions/permissions/organizations": { - /** - * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise"]; - /** - * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - put: operations["enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise"]; - }; - "/enterprises/{enterprise}/actions/permissions/organizations/{org_id}": { - /** - * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - put: operations["enterprise-admin/enable-selected-organization-github-actions-enterprise"]; - /** - * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - delete: operations["enterprise-admin/disable-selected-organization-github-actions-enterprise"]; - }; - "/enterprises/{enterprise}/actions/permissions/selected-actions": { - /** - * Gets the selected actions that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/get-allowed-actions-enterprise"]; - /** - * Sets the actions that are allowed in an enterprise. 
To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - put: operations["enterprise-admin/set-allowed-actions-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runner-groups": { - /** - * Lists all self-hosted runner groups for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/list-self-hosted-runner-groups-for-enterprise"]; - /** - * Creates a new self-hosted runner group for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - post: operations["enterprise-admin/create-self-hosted-runner-group-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": { - /** - * Gets a specific self-hosted runner group for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/get-self-hosted-runner-group-for-enterprise"]; - /** - * Deletes a self-hosted runner group for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - delete: operations["enterprise-admin/delete-self-hosted-runner-group-from-enterprise"]; - /** - * Updates the `name` and `visibility` of a self-hosted runner group in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - patch: operations["enterprise-admin/update-self-hosted-runner-group-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": { - /** - * Lists the organizations with access to a self-hosted runner group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise"]; - /** - * Replaces the list of organizations that have access to a self-hosted runner configured in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - put: operations["enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": { - /** - * Adds an organization to the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - put: operations["enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise"]; - /** - * Removes an organization from the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. 
For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - delete: operations["enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": { - /** - * Lists the self-hosted runners that are in a specific enterprise group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/list-self-hosted-runners-in-group-for-enterprise"]; - /** - * Replaces the list of self-hosted runners that are part of an enterprise runner group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - put: operations["enterprise-admin/set-self-hosted-runners-in-group-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": { - /** - * Adds a self-hosted runner to a runner group configured in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` - * scope to use this endpoint. - */ - put: operations["enterprise-admin/add-self-hosted-runner-to-group-for-enterprise"]; - /** - * Removes a self-hosted runner from a group configured in an enterprise. The runner is then returned to the default group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - delete: operations["enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runners": { - /** - * Lists all self-hosted runners configured for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/list-self-hosted-runners-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runners/downloads": { - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/list-runner-applications-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runners/registration-token": { - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/enterprises/octo-enterprise --token TOKEN - * ``` - */ - post: operations["enterprise-admin/create-registration-token-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runners/remove-token": { - /** - * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an enterprise. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - * - * #### Example using remove token - * - * To remove your self-hosted runner from an enterprise, replace `TOKEN` with the remove token provided by this - * endpoint. 
- * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - post: operations["enterprise-admin/create-remove-token-for-enterprise"]; - }; - "/enterprises/{enterprise}/actions/runners/{runner_id}": { - /** - * Gets a specific self-hosted runner configured in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - get: operations["enterprise-admin/get-self-hosted-runner-for-enterprise"]; - /** - * Forces the removal of a self-hosted runner from an enterprise. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - delete: operations["enterprise-admin/delete-self-hosted-runner-from-enterprise"]; - }; - "/enterprises/{enterprise}/audit-log": { - /** Gets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope. */ - get: operations["audit-log/get-audit-log"]; - }; - "/enterprises/{enterprise}/settings/billing/actions": { - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * The authenticated user must be an enterprise admin. - */ - get: operations["billing/get-github-actions-billing-ghe"]; - }; - "/enterprises/{enterprise}/settings/billing/packages": { - /** - * Gets the free and paid storage used for GitHub Packages in gigabytes. - * - * Paid storage only applies to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * The authenticated user must be an enterprise admin. - */ - get: operations["billing/get-github-packages-billing-ghe"]; - }; - "/enterprises/{enterprise}/settings/billing/shared-storage": { - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. - * - * Paid storage only applies to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * The authenticated user must be an enterprise admin. - */ - get: operations["billing/get-shared-storage-billing-ghe"]; - }; - "/events": { - /** We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. */ - get: operations["activity/list-public-events"]; - }; - "/feeds": { - /** - * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format.
The Feeds API lists all the feeds available to the authenticated user: - * - * * **Timeline**: The GitHub global public timeline - * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) - * * **Current user public**: The public timeline for the authenticated user - * * **Current user**: The private timeline for the authenticated user - * * **Current user actor**: The private timeline for activity created by the authenticated user - * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. - * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. - * - * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. - */ - get: operations["activity/get-feeds"]; - }; - "/gists": { - /** Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: */ - get: operations["gists/list"]; - /** - * Allows you to add a new gist with one or more files. - * - * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. - */ - post: operations["gists/create"]; - }; - "/gists/public": { - /** - * List public gists sorted by most recently updated to least recently updated. - * - * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. - */ - get: operations["gists/list-public"]; - }; - "/gists/starred": { - /** List the authenticated user's starred gists: */ - get: operations["gists/list-starred"]; - }; - "/gists/{gist_id}": { - get: operations["gists/get"]; - delete: operations["gists/delete"]; - /** Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. */ - patch: operations["gists/update"]; - }; - "/gists/{gist_id}/comments": { - get: operations["gists/list-comments"]; - post: operations["gists/create-comment"]; - }; - "/gists/{gist_id}/comments/{comment_id}": { - get: operations["gists/get-comment"]; - delete: operations["gists/delete-comment"]; - patch: operations["gists/update-comment"]; - }; - "/gists/{gist_id}/commits": { - get: operations["gists/list-commits"]; - }; - "/gists/{gist_id}/forks": { - get: operations["gists/list-forks"]; - /** **Note**: This was previously `/gists/:gist_id/fork`. */ - post: operations["gists/fork"]; - }; - "/gists/{gist_id}/star": { - get: operations["gists/check-is-starred"]; - /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ - put: operations["gists/star"]; - delete: operations["gists/unstar"]; - }; - "/gists/{gist_id}/{sha}": { - get: operations["gists/get-revision"]; - }; - "/gitignore/templates": { - /** List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). 
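The gists endpoints above boil down to a single `POST /gists` call when creating a gist. A minimal sketch, assuming a `GH_TOKEN` environment variable holding a personal access token with the `gist` scope, and a file name chosen to avoid the reserved `gistfile` naming scheme noted above:

```ts
// Minimal sketch: create a secret gist with one file. GH_TOKEN is a placeholder.
import { getOctokit } from "@actions/github";

async function createGist(): Promise<void> {
  const octokit = getOctokit(process.env.GH_TOKEN ?? "");

  // POST /gists -- the file name avoids the reserved "gistfile<N>" pattern.
  const { data: gist } = await octokit.request("POST /gists", {
    description: "Example gist created from a script",
    public: false,
    files: {
      "notes.md": { content: "Hello from the REST API." },
    },
  });

  console.log(`Created gist: ${gist.html_url}`);
}

createGist().catch(console.error);
```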
*/ - get: operations["gitignore/get-all-templates"]; - }; - "/gitignore/templates/{name}": { - /** - * The API also allows fetching the source of a single template. - * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. - */ - get: operations["gitignore/get-template"]; - }; - "/installation/repositories": { - /** - * List repositories that an app installation can access. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - get: operations["apps/list-repos-accessible-to-installation"]; - }; - "/installation/token": { - /** - * Revokes the installation token you're using to authenticate as an installation and access this endpoint. - * - * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - delete: operations["apps/revoke-installation-access-token"]; - }; - "/issues": { - /** - * List issues assigned to the authenticated user across all visible repositories including owned repositories, member - * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not - * necessarily assigned to you. - * - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - get: operations["issues/list"]; - }; - "/licenses": { - get: operations["licenses/get-all-commonly-used"]; - }; - "/licenses/{license}": { - get: operations["licenses/get"]; - }; - "/markdown": { - post: operations["markdown/render"]; - }; - "/markdown/raw": { - /** You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. */ - post: operations["markdown/render-raw"]; - }; - "/marketplace_listing/accounts/{account_id}": { - /** - * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - get: operations["apps/get-subscription-plan-for-account"]; - }; - "/marketplace_listing/plans": { - /** - * Lists all plans that are part of your GitHub Marketplace listing. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - get: operations["apps/list-plans"]; - }; - "/marketplace_listing/plans/{plan_id}/accounts": { - /** - * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - get: operations["apps/list-accounts-for-plan"]; - }; - "/marketplace_listing/stubbed/accounts/{account_id}": { - /** - * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - get: operations["apps/get-subscription-plan-for-account-stubbed"]; - }; - "/marketplace_listing/stubbed/plans": { - /** - * Lists all plans that are part of your GitHub Marketplace listing. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - get: operations["apps/list-plans-stubbed"]; - }; - "/marketplace_listing/stubbed/plans/{plan_id}/accounts": { - /** - * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. 
- * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - get: operations["apps/list-accounts-for-plan-stubbed"]; - }; - "/meta": { - /** - * Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://help.github.com/articles/about-github-s-ip-addresses/)." - * - * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. - */ - get: operations["meta/get"]; - }; - "/networks/{owner}/{repo}/events": { - get: operations["activity/list-public-events-for-repo-network"]; - }; - "/notifications": { - /** List all notifications for the current user, sorted by most recently updated. */ - get: operations["activity/list-notifications-for-authenticated-user"]; - /** Marks all notifications as "read" and removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ - put: operations["activity/mark-notifications-as-read"]; - }; - "/notifications/threads/{thread_id}": { - get: operations["activity/get-thread"]; - patch: operations["activity/mark-thread-as-read"]; - }; - "/notifications/threads/{thread_id}/subscription": { - /** - * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). - * - * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or have manually subscribed to a thread. - */ - get: operations["activity/get-thread-subscription-for-authenticated-user"]; - /** - * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. - * - * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribe to threads that you have previously ignored. - * - * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. - */ - put: operations["activity/set-thread-subscription"]; - /** Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications.
To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. */ - delete: operations["activity/delete-thread-subscription"]; - }; - "/octocat": { - /** Get the octocat as ASCII art */ - get: operations["meta/get-octocat"]; - }; - "/organizations": { - /** - * Lists all organizations, in the order that they were created on GitHub. - * - * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. - */ - get: operations["orgs/list"]; - }; - "/orgs/{org}": { - /** - * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). - * - * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." - */ - get: operations["orgs/get"]; - /** - * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). - * - * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. - */ - patch: operations["orgs/update"]; - }; - "/orgs/{org}/actions/permissions": { - /** - * Gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - get: operations["actions/get-github-actions-permissions-organization"]; - /** - * Sets the GitHub Actions permissions policy for repositories and allowed actions in an organization. - * - * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions, then you cannot override them for the organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - put: operations["actions/set-github-actions-permissions-organization"]; - }; - "/orgs/{org}/actions/permissions/repositories": { - /** - * Lists the selected repositories that are enabled for GitHub Actions in an organization. 
To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - get: operations["actions/list-selected-repositories-enabled-github-actions-organization"]; - /** - * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - put: operations["actions/set-selected-repositories-enabled-github-actions-organization"]; - }; - "/orgs/{org}/actions/permissions/repositories/{repository_id}": { - /** - * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - put: operations["actions/enable-selected-repository-github-actions-organization"]; - /** - * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - delete: operations["actions/disable-selected-repository-github-actions-organization"]; - }; - "/orgs/{org}/actions/permissions/selected-actions": { - /** - * Gets the selected actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - get: operations["actions/get-allowed-actions-organization"]; - /** - * Sets the actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
- * - * If the organization belongs to an enterprise that has `selected` actions set at the enterprise level, then you cannot override any of the enterprise's allowed actions settings. - * - * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - put: operations["actions/set-allowed-actions-organization"]; - }; - "/orgs/{org}/actions/runner-groups": { - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Lists all self-hosted runner groups configured in an organization and inherited from an enterprise. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - get: operations["actions/list-self-hosted-runner-groups-for-org"]; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Creates a new self-hosted runner group for an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - post: operations["actions/create-self-hosted-runner-group-for-org"]; - }; - "/orgs/{org}/actions/runner-groups/{runner_group_id}": { - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Gets a specific self-hosted runner group for an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - get: operations["actions/get-self-hosted-runner-group-for-org"]; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Deletes a self-hosted runner group for an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - delete: operations["actions/delete-self-hosted-runner-group-from-org"]; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Updates the `name` and `visibility` of a self-hosted runner group in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - patch: operations["actions/update-self-hosted-runner-group-for-org"]; - }; - "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": { - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." 
- * - * Lists the repositories with access to a self-hosted runner group configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - get: operations["actions/list-repo-access-to-self-hosted-runner-group-in-org"]; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Replaces the list of repositories that have access to a self-hosted runner group configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - put: operations["actions/set-repo-access-to-self-hosted-runner-group-in-org"]; - }; - "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}": { - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * - * Adds a repository to the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` - * scope to use this endpoint. - */ - put: operations["actions/add-repo-access-to-self-hosted-runner-group-in-org"]; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * - * Removes a repository from the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - delete: operations["actions/remove-repo-access-to-self-hosted-runner-group-in-org"]; - }; - "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners": { - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Lists self-hosted runners that are in a specific organization group. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - get: operations["actions/list-self-hosted-runners-in-group-for-org"]; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Replaces the list of self-hosted runners that are part of an organization runner group. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
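The runner-group endpoints above follow the same request pattern; the sketch below lists the runners in an organization runner group and then replaces them. The organization name, group id, and runner ids are placeholders, and a token with the `admin:org` scope is assumed:

```ts
// Minimal sketch: inspect, then replace, the runners of an organization runner group.
// GH_TOKEN, "octo-org", the group id, and the runner ids are placeholders.
import { getOctokit } from "@actions/github";

async function setGroupRunners(): Promise<void> {
  const octokit = getOctokit(process.env.GH_TOKEN ?? "");
  const org = "octo-org";
  const runner_group_id = 1;

  // GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners
  const { data: current } = await octokit.request(
    "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners",
    { org, runner_group_id }
  );
  console.log("runners currently in group:", current.runners.map((r) => r.id));

  // PUT replaces the whole list in one call.
  await octokit.request(
    "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners",
    { org, runner_group_id, runners: [9, 2] }
  );
}

setGroupRunners().catch(console.error);
```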
- */ - put: operations["actions/set-self-hosted-runners-in-group-for-org"]; - }; - "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": { - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * - * Adds a self-hosted runner to a runner group configured in an organization. - * - * You must authenticate using an access token with the `admin:org` - * scope to use this endpoint. - */ - put: operations["actions/add-self-hosted-runner-to-group-for-org"]; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * - * Removes a self-hosted runner from a group configured in an organization. The runner is then returned to the default group. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - delete: operations["actions/remove-self-hosted-runner-from-group-for-org"]; - }; - "/orgs/{org}/actions/runners": { - /** - * Lists all self-hosted runners configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - get: operations["actions/list-self-hosted-runners-for-org"]; - }; - "/orgs/{org}/actions/runners/downloads": { - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - get: operations["actions/list-runner-applications-for-org"]; - }; - "/orgs/{org}/actions/runners/registration-token": { - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/octo-org --token TOKEN - * ``` - */ - post: operations["actions/create-registration-token-for-org"]; - }; - "/orgs/{org}/actions/runners/remove-token": { - /** - * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - * - * #### Example using remove token - * - * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this - * endpoint. - * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - post: operations["actions/create-remove-token-for-org"]; - }; - "/orgs/{org}/actions/runners/{runner_id}": { - /** - * Gets a specific self-hosted runner configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - get: operations["actions/get-self-hosted-runner-for-org"]; - /** - * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
- */ - delete: operations["actions/delete-self-hosted-runner-from-org"]; - }; - "/orgs/{org}/actions/secrets": { - /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - get: operations["actions/list-org-secrets"]; - }; - "/orgs/{org}/actions/secrets/public-key": { - /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - get: operations["actions/get-org-public-key"]; - }; - "/orgs/{org}/actions/secrets/{secret_name}": { - /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - get: operations["actions/get-org-secret"]; - /** - * Creates or updates an organization secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to - * use this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. - * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. - * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
- * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - put: operations["actions/create-or-update-org-secret"]; - /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - delete: operations["actions/delete-org-secret"]; - }; - "/orgs/{org}/actions/secrets/{secret_name}/repositories": { - /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - get: operations["actions/list-selected-repos-for-org-secret"]; - /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - put: operations["actions/set-selected-repos-for-org-secret"]; - }; - "/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": { - /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - put: operations["actions/add-selected-repo-to-org-secret"]; - /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - delete: operations["actions/remove-selected-repo-from-org-secret"]; - }; - "/orgs/{org}/audit-log": { - /** - * Gets the audit log for an organization. 
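The encryption examples above stop at the base64 ciphertext. The sketch below takes the remaining step and uploads the result with `PUT /orgs/{org}/actions/secrets/{secret_name}`, reusing the `tweetsodium` library from the Node.js example; the organization, secret name, and token variable are placeholders:

```ts
// Minimal sketch: fetch the org public key, seal a secret with tweetsodium,
// and create or update the org secret. GH_TOKEN, "octo-org", and "MY_SECRET"
// are placeholders; the admin:org scope is assumed.
import { getOctokit } from "@actions/github";
import * as sodium from "tweetsodium";

async function putOrgSecret(): Promise<void> {
  const octokit = getOctokit(process.env.GH_TOKEN ?? "");
  const org = "octo-org";

  // GET /orgs/{org}/actions/secrets/public-key
  const { data: key } = await octokit.request(
    "GET /orgs/{org}/actions/secrets/public-key",
    { org }
  );

  // Encrypt the plain-text value with the org public key (LibSodium sealed box).
  const messageBytes = Buffer.from("plain-text-secret");
  const keyBytes = Buffer.from(key.key, "base64");
  const encrypted_value = Buffer.from(sodium.seal(messageBytes, keyBytes)).toString("base64");

  // PUT /orgs/{org}/actions/secrets/{secret_name}
  await octokit.request("PUT /orgs/{org}/actions/secrets/{secret_name}", {
    org,
    secret_name: "MY_SECRET",
    encrypted_value,
    key_id: key.key_id,
    visibility: "all",
  });
}

putOrgSecret().catch(console.error);
```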
For more information, see "[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization)." - * - * To use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint. - */ - get: operations["orgs/get-audit-log"]; - }; - "/orgs/{org}/blocks": { - /** List the users blocked by an organization. */ - get: operations["orgs/list-blocked-users"]; - }; - "/orgs/{org}/blocks/{username}": { - get: operations["orgs/check-blocked-user"]; - put: operations["orgs/block-user"]; - delete: operations["orgs/unblock-user"]; - }; - "/orgs/{org}/credential-authorizations": { - /** - * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products). - * - * An authenticated organization owner with the `read:org` scope can list all credential authorizations for an organization that uses SAML single sign-on (SSO). The credentials are either personal access tokens or SSH keys that organization members have authorized for the organization. For more information, see [About authentication with SAML single sign-on](https://help.github.com/en/articles/about-authentication-with-saml-single-sign-on). - */ - get: operations["orgs/list-saml-sso-authorizations"]; - }; - "/orgs/{org}/credential-authorizations/{credential_id}": { - /** - * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products). - * - * An authenticated organization owner with the `admin:org` scope can remove a credential authorization for an organization that uses SAML SSO. Once you remove someone's credential authorization, they will need to create a new personal access token or SSH key and authorize it for the organization they want to access. - */ - delete: operations["orgs/remove-saml-sso-authorization"]; - }; - "/orgs/{org}/events": { - get: operations["activity/list-public-org-events"]; - }; - "/orgs/{org}/failed_invitations": { - /** The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. */ - get: operations["orgs/list-failed-invitations"]; - }; - "/orgs/{org}/hooks": { - get: operations["orgs/list-webhooks"]; - /** Here's how you can create a hook that posts payloads in JSON format: */ - post: operations["orgs/create-webhook"]; - }; - "/orgs/{org}/hooks/{hook_id}": { - /** Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." */ - get: operations["orgs/get-webhook"]; - delete: operations["orgs/delete-webhook"]; - /** Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. 
If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." */ - patch: operations["orgs/update-webhook"]; - }; - "/orgs/{org}/hooks/{hook_id}/config": { - /** - * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook](/rest/reference/orgs#get-an-organization-webhook)." - * - * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. - */ - get: operations["orgs/get-webhook-config-for-org"]; - /** - * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook](/rest/reference/orgs#update-an-organization-webhook)." - * - * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. - */ - patch: operations["orgs/update-webhook-config-for-org"]; - }; - "/orgs/{org}/hooks/{hook_id}/pings": { - /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ - post: operations["orgs/ping-webhook"]; - }; - "/orgs/{org}/installation": { - /** - * Enables an authenticated GitHub App to find the organization's installation information. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - get: operations["apps/get-org-installation"]; - }; - "/orgs/{org}/installations": { - /** Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. */ - get: operations["orgs/list-app-installations"]; - }; - "/orgs/{org}/interaction-limits": { - /** Shows which type of GitHub user can interact with this organization and when the restriction expires. If there are no restrictions, you will see an empty response. */ - get: operations["interactions/get-restrictions-for-org"]; - /** Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. */ - put: operations["interactions/set-restrictions-for-org"]; - /** Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. */ - delete: operations["interactions/remove-restrictions-for-org"]; - }; - "/orgs/{org}/invitations": { - /** The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. */ - get: operations["orgs/list-pending-invitations"]; - /** - * Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner.
- * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - post: operations["orgs/create-invitation"]; - }; - "/orgs/{org}/invitations/{invitation_id}": { - /** - * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). - */ - delete: operations["orgs/cancel-invitation"]; - }; - "/orgs/{org}/invitations/{invitation_id}/teams": { - /** List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. */ - get: operations["orgs/list-invitation-teams"]; - }; - "/orgs/{org}/issues": { - /** - * List issues in an organization assigned to the authenticated user. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - get: operations["issues/list-for-org"]; - }; - "/orgs/{org}/members": { - /** List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. */ - get: operations["orgs/list-members"]; - }; - "/orgs/{org}/members/{username}": { - /** Check if a user is, publicly or privately, a member of the organization. */ - get: operations["orgs/check-membership-for-user"]; - /** Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. */ - delete: operations["orgs/remove-member"]; - }; - "/orgs/{org}/memberships/{username}": { - /** In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. */ - get: operations["orgs/get-membership-for-user"]; - /** - * Only authenticated organization owners can add a member to the organization or update the member's role. - * - * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. - * - * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. 
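As a hedged sketch of the invitation endpoint described above (`operations["orgs/create-invitation"]`), the snippet below posts an email invitation. The org, email address, and role value are illustrative placeholders.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// POST /orgs/{org}/invitations (operations["orgs/create-invitation"]).
async function inviteByEmail(): Promise<void> {
  const { data: invitation } = await octokit.request("POST /orgs/{org}/invitations", {
    org: "octo-org",                    // placeholder organization
    email: "new.member@example.com",    // placeholder invitee
    role: "direct_member",
  });
  console.log(`invitation ${invitation.id} created with role ${invitation.role}`);
}

inviteByEmail().catch((err) => console.error(err));
```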
If the authenticated user changes an owner's role to `member`, no email will be sent. - * - * **Rate limits** - * - * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. - */ - put: operations["orgs/set-membership-for-user"]; - /** - * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. - * - * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. - */ - delete: operations["orgs/remove-membership-for-user"]; - }; - "/orgs/{org}/migrations": { - /** Lists the most recent migrations. */ - get: operations["migrations/list-for-org"]; - /** Initiates the generation of a migration archive. */ - post: operations["migrations/start-for-org"]; - }; - "/orgs/{org}/migrations/{migration_id}": { - /** - * Fetches the status of a migration. - * - * The `state` of a migration can be one of the following values: - * - * * `pending`, which means the migration hasn't started yet. - * * `exporting`, which means the migration is in progress. - * * `exported`, which means the migration finished successfully. - * * `failed`, which means the migration failed. - */ - get: operations["migrations/get-status-for-org"]; - }; - "/orgs/{org}/migrations/{migration_id}/archive": { - /** Fetches the URL to a migration archive. */ - get: operations["migrations/download-archive-for-org"]; - /** Deletes a previous migration archive. Migration archives are automatically deleted after seven days. */ - delete: operations["migrations/delete-archive-for-org"]; - }; - "/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": { - /** Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. */ - delete: operations["migrations/unlock-repo-for-org"]; - }; - "/orgs/{org}/migrations/{migration_id}/repositories": { - /** List all the repositories for this organization migration. */ - get: operations["migrations/list-repos-for-org"]; - }; - "/orgs/{org}/outside_collaborators": { - /** List all users who are outside collaborators of an organization. */ - get: operations["orgs/list-outside-collaborators"]; - }; - "/orgs/{org}/outside_collaborators/{username}": { - /** When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://help.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". */ - put: operations["orgs/convert-member-to-outside-collaborator"]; - /** Removing a user from this list will remove them from all the organization's repositories. */ - delete: operations["orgs/remove-outside-collaborator"]; - }; - "/orgs/{org}/packages/{package_type}/{package_name}": { - /** - * Gets a specific package in an organization. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. 
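The migration-status description above enumerates the `state` values (`pending`, `exporting`, `exported`, `failed`). A minimal polling sketch, assuming a placeholder org and an existing migration id:

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// Polls GET /orgs/{org}/migrations/{migration_id} until the archive is ready.
async function waitForMigration(migrationId: number): Promise<void> {
  for (;;) {
    const { data } = await octokit.request("GET /orgs/{org}/migrations/{migration_id}", {
      org: "octo-org",                 // placeholder organization
      migration_id: migrationId,
    });
    if (data.state === "exported") return;                        // archive is ready
    if (data.state === "failed") throw new Error("migration failed");
    await new Promise((resolve) => setTimeout(resolve, 30_000));  // still pending/exporting
  }
}
```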
- * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-package-for-organization"]; - /** - * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. - */ - delete: operations["packages/delete-package-for-org"]; - }; - "/orgs/{org}/packages/{package_type}/{package_name}/restore": { - /** - * Restores an entire package in an organization. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. - */ - post: operations["packages/restore-package-for-org"]; - }; - "/orgs/{org}/packages/{package_type}/{package_name}/versions": { - /** - * Returns all package versions for a package owned by an organization. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-all-package-versions-for-package-owned-by-org"]; - }; - "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": { - /** - * Gets a specific package version in an organization. - * - * You must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-package-version-for-organization"]; - /** - * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. - */ - delete: operations["packages/delete-package-version-for-org"]; - }; - "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { - /** - * Restores a specific package version in an organization. 
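For the package endpoints above, a hedged example of fetching one package owned by an organization; `octo-org`, the package type, and the package name are placeholders, and the extra `repo` scope applies when `package_type` is not `container`.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// GET /orgs/{org}/packages/{package_type}/{package_name}
// (operations["packages/get-package-for-organization"]).
async function getOrgPackage(): Promise<void> {
  const { data: pkg } = await octokit.request(
    "GET /orgs/{org}/packages/{package_type}/{package_name}",
    { org: "octo-org", package_type: "container", package_name: "hello-world" }
  );
  console.log(pkg.name, pkg.version_count, pkg.visibility);
}

getOrgPackage().catch((err) => console.error(err));
```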
- * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. - */ - post: operations["packages/restore-package-version-for-org"]; - }; - "/orgs/{org}/projects": { - /** Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - get: operations["projects/list-for-org"]; - /** Creates an organization project board. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - post: operations["projects/create-for-org"]; - }; - "/orgs/{org}/public_members": { - /** Members of an organization can choose to have their membership publicized or not. */ - get: operations["orgs/list-public-members"]; - }; - "/orgs/{org}/public_members/{username}": { - get: operations["orgs/check-public-membership-for-user"]; - /** - * The user can publicize their own membership. (A user cannot publicize the membership for another user.) - * - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - put: operations["orgs/set-public-membership-for-authenticated-user"]; - delete: operations["orgs/remove-public-membership-for-authenticated-user"]; - }; - "/orgs/{org}/repos": { - /** Lists repositories for the specified organization. */ - get: operations["repos/list-for-org"]; - /** - * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository - */ - post: operations["repos/create-in-org"]; - }; - "/orgs/{org}/settings/billing/actions": { - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. 
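The `repos/create-in-org` description above spells out the OAuth scope requirements; here is a small sketch of creating a private repository in an organization. The org and repository names are placeholders.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// POST /orgs/{org}/repos (operations["repos/create-in-org"]);
// a private repository needs the `repo` scope, as noted above.
async function createPrivateRepo(): Promise<void> {
  const { data: repo } = await octokit.request("POST /orgs/{org}/repos", {
    org: "octo-org",            // placeholder organization
    name: "example-service",    // placeholder repository name
    private: true,
    auto_init: true,            // create an initial commit so the repo is clonable
  });
  console.log(`created ${repo.full_name}`);
}

createPrivateRepo().catch((err) => console.error(err));
```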
For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - get: operations["billing/get-github-actions-billing-org"]; - }; - "/orgs/{org}/settings/billing/packages": { - /** - * Gets the free and paid storage used for GitHub Packages in gigabytes. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - get: operations["billing/get-github-packages-billing-org"]; - }; - "/orgs/{org}/settings/billing/shared-storage": { - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - get: operations["billing/get-shared-storage-billing-org"]; - }; - "/orgs/{org}/team-sync/groups": { - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * List IdP groups available in an organization. You can limit your page results using the `per_page` parameter. GitHub generates a url-encoded `page` token using a cursor value for where the next page begins. For more information on cursor pagination, see "[Offset and Cursor Pagination explained](https://dev.to/jackmarchant/offset-and-cursor-pagination-explained-b89)." - * - * The `per_page` parameter provides pagination for a list of IdP groups the authenticated user can access in an organization. For example, if the user `octocat` wants to see two groups per page in `octo-org` via cURL, it would look like this: - */ - get: operations["teams/list-idp-groups-for-org"]; - }; - "/orgs/{org}/teams": { - /** Lists all teams in an organization that are visible to the authenticated user. */ - get: operations["teams/list"]; - /** - * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://help.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." - * - * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". - */ - post: operations["teams/create"]; - }; - "/orgs/{org}/teams/{team_slug}": { - /** - * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. 
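The Actions billing description above notes that the token needs `repo` or `admin:org`. A hedged sketch of reading the summary for an organization (the org name is a placeholder; the response field names follow GitHub's documented billing schema):

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// GET /orgs/{org}/settings/billing/actions
// (operations["billing/get-github-actions-billing-org"]).
async function printActionsUsage(): Promise<void> {
  const { data } = await octokit.request("GET /orgs/{org}/settings/billing/actions", {
    org: "octo-org", // placeholder organization
  });
  console.log(`minutes used: ${data.total_minutes_used} of ${data.included_minutes} included`);
}

printActionsUsage().catch((err) => console.error(err));
```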
- */ - get: operations["teams/get-by-name"]; - /** - * To delete a team, the authenticated user must be an organization owner or team maintainer. - * - * If you are an organization owner, deleting a parent team will delete all of its child teams as well. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. - */ - delete: operations["teams/delete-in-org"]; - /** - * To edit a team, the authenticated user must either be an organization owner or a team maintainer. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. - */ - patch: operations["teams/update-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions": { - /** - * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. - */ - get: operations["teams/list-discussions-in-org"]; - /** - * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. - */ - post: operations["teams/create-discussion-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": { - /** - * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. - */ - get: operations["teams/get-discussion-in-org"]; - /** - * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. - */ - delete: operations["teams/delete-discussion-in-org"]; - /** - * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. 
- */ - patch: operations["teams/update-discussion-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": { - /** - * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. - */ - get: operations["teams/list-discussion-comments-in-org"]; - /** - * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. - */ - post: operations["teams/create-discussion-comment-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": { - /** - * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - get: operations["teams/get-discussion-comment-in-org"]; - /** - * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - delete: operations["teams/delete-discussion-comment-in-org"]; - /** - * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - patch: operations["teams/update-discussion-comment-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": { - /** - * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
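For the team-discussion endpoints above, a minimal sketch that creates a discussion and then a comment on it; the org, team slug, and text are placeholders, and the token needs `write:discussion`.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// teams/create-discussion-in-org followed by teams/create-discussion-comment-in-org.
async function postDiscussionWithComment(): Promise<void> {
  const { data: discussion } = await octokit.request(
    "POST /orgs/{org}/teams/{team_slug}/discussions",
    { org: "octo-org", team_slug: "justice-league", title: "Deploy window", body: "Proposed for Friday." }
  );
  await octokit.request(
    "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments",
    {
      org: "octo-org",
      team_slug: "justice-league",
      discussion_number: discussion.number,
      body: "Works for me.",
    }
  );
}

postDiscussionWithComment().catch((err) => console.error(err));
```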
- * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. - */ - get: operations["reactions/list-for-team-discussion-comment-in-org"]; - /** - * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. - */ - post: operations["reactions/create-for-team-discussion-comment-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": { - /** - * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. - * - * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - delete: operations["reactions/delete-for-team-discussion-comment"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": { - /** - * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. - */ - get: operations["reactions/list-for-team-discussion-in-org"]; - /** - * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. - */ - post: operations["reactions/create-for-team-discussion-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": { - /** - * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. - * - * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
- */ - delete: operations["reactions/delete-for-team-discussion"]; - }; - "/orgs/{org}/teams/{team_slug}/invitations": { - /** - * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. - */ - get: operations["teams/list-pending-invitations-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/members": { - /** - * Team members will include the members of child teams. - * - * To list members in a team, the team must be visible to the authenticated user. - */ - get: operations["teams/list-members-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/memberships/{username}": { - /** - * Team members will include the members of child teams. - * - * To get a user's membership with a team, the team must be visible to the authenticated user. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. - * - * **Note:** - * The response contains the `state` of the membership and the member's `role`. - * - * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). - */ - get: operations["teams/get-membership-for-user-in-org"]; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. - * - * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. 
- */ - put: operations["teams/add-or-update-membership-for-user-in-org"]; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. - */ - delete: operations["teams/remove-membership-for-user-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/projects": { - /** - * Lists the organization projects for a team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. - */ - get: operations["teams/list-projects-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/projects/{project_id}": { - /** - * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - get: operations["teams/check-permissions-for-project-in-org"]; - /** - * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - put: operations["teams/add-or-update-project-permissions-in-org"]; - /** - * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - delete: operations["teams/remove-project-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/repos": { - /** - * Lists a team's repositories visible to the authenticated user. 
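A hedged sketch of the membership endpoint described above, adding a user to a team as a maintainer; org, team slug, and username are placeholders, and the IdP caveat above still applies when team synchronization is enabled.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// PUT /orgs/{org}/teams/{team_slug}/memberships/{username}
// (teams/add-or-update-membership-for-user-in-org).
async function addMaintainer(username: string): Promise<void> {
  const { data } = await octokit.request(
    "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}",
    { org: "octo-org", team_slug: "justice-league", username, role: "maintainer" }
  );
  console.log(`membership state: ${data.state}`); // "pending" until a non-member accepts
}

addMaintainer("octocat").catch((err) => console.error(err));
```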
- * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. - */ - get: operations["teams/list-repos-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": { - /** - * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. - * - * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. - * - * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - */ - get: operations["teams/check-permissions-for-repo-in-org"]; - /** - * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - * - * For more information about the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". - */ - put: operations["teams/add-or-update-repo-permissions-in-org"]; - /** - * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - */ - delete: operations["teams/remove-repo-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/team-sync/group-mappings": { - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * List IdP groups connected to a team on GitHub. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. - */ - get: operations["teams/list-idp-groups-in-org"]; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. 
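To illustrate the team/repository endpoints above, a sketch that grants `push` permission and then re-checks it using the custom media type mentioned in the description. All names are placeholders; without that accept header the check responds with a bare status rather than a repository body.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// PUT then GET on /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}.
async function grantPush(): Promise<void> {
  await octokit.request("PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {
    org: "octo-org",
    team_slug: "justice-league",
    owner: "octo-org",
    repo: "hello-world",
    permission: "push",
  });
  const res = await octokit.request("GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {
    org: "octo-org",
    team_slug: "justice-league",
    owner: "octo-org",
    repo: "hello-world",
    headers: { accept: "application/vnd.github.v3.repository+json" },
  });
  console.log(res.status); // a 404 (no permission) would throw instead
}

grantPush().catch((err) => console.error(err));
```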
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. - */ - patch: operations["teams/create-or-update-idp-group-connections-in-org"]; - }; - "/orgs/{org}/teams/{team_slug}/teams": { - /** - * Lists the child teams of the team specified by `{team_slug}`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. - */ - get: operations["teams/list-child-in-org"]; - }; - "/projects/columns/cards/{card_id}": { - get: operations["projects/get-card"]; - delete: operations["projects/delete-card"]; - patch: operations["projects/update-card"]; - }; - "/projects/columns/cards/{card_id}/moves": { - post: operations["projects/move-card"]; - }; - "/projects/columns/{column_id}": { - get: operations["projects/get-column"]; - delete: operations["projects/delete-column"]; - patch: operations["projects/update-column"]; - }; - "/projects/columns/{column_id}/cards": { - get: operations["projects/list-cards"]; - /** - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by the `pull_request` key. - * - * Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - post: operations["projects/create-card"]; - }; - "/projects/columns/{column_id}/moves": { - post: operations["projects/move-column"]; - }; - "/projects/{project_id}": { - /** Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - get: operations["projects/get"]; - /** Deletes a project board. Returns a `404 Not Found` status if projects are disabled. */ - delete: operations["projects/delete"]; - /** Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - patch: operations["projects/update"]; - }; - "/projects/{project_id}/collaborators": { - /** Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. */ - get: operations["projects/list-collaborators"]; - }; - "/projects/{project_id}/collaborators/{username}": { - /** Adds a collaborator to an organization project and sets their permission level. 
You must be an organization owner or a project `admin` to add a collaborator. */ - put: operations["projects/add-collaborator"]; - /** Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. */ - delete: operations["projects/remove-collaborator"]; - }; - "/projects/{project_id}/collaborators/{username}/permission": { - /** Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. */ - get: operations["projects/get-permission-for-user"]; - }; - "/projects/{project_id}/columns": { - get: operations["projects/list-columns"]; - post: operations["projects/create-column"]; - }; - "/rate_limit": { - /** - * **Note:** Accessing this endpoint does not count against your REST API rate limit. - * - * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. - */ - get: operations["rate-limit/get"]; - }; - "/reactions/{reaction_id}": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/). - * - * OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). - */ - delete: operations["reactions/delete-legacy"]; - }; - "/repos/{owner}/{repo}": { - /** - * When you pass the `scarlet-witch-preview` media type, requests to get a repository will also return the repository's code of conduct if it can be detected from the repository's code of conduct file. - * - * The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. - */ - get: operations["repos/get"]; - /** - * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. - * - * If an organization owner has configured the organization to prevent members from deleting organization-owned - * repositories, you will get a `403 Forbidden` response. - */ - delete: operations["repos/delete"]; - /** **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. */ - patch: operations["repos/update"]; - }; - "/repos/{owner}/{repo}/actions/artifacts": { - /** Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/list-artifacts-for-repo"]; - }; - "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}": { - /** Gets a specific artifact for a workflow run. 
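The `/rate_limit` note above recommends reading the `core` object rather than the deprecated `rate` object. A minimal sketch:

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// GET /rate_limit (operations["rate-limit/get"]); does not count against the limit.
async function remainingCoreRequests(): Promise<number> {
  const { data } = await octokit.request("GET /rate_limit");
  return data.resources.core.remaining; // prefer resources.core over the deprecated rate
}

remainingCoreRequests().then((n) => console.log(`core requests left: ${n}`));
```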
Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/get-artifact"]; - /** Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - delete: operations["actions/delete-artifact"]; - }; - "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": { - /** - * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in - * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to - * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. - * GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - get: operations["actions/download-artifact"]; - }; - "/repos/{owner}/{repo}/actions/jobs/{job_id}": { - /** Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/get-job-for-workflow-run"]; - }; - "/repos/{owner}/{repo}/actions/jobs/{job_id}/logs": { - /** - * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look - * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can - * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must - * have the `actions:read` permission to use this endpoint. - */ - get: operations["actions/download-job-logs-for-workflow-run"]; - }; - "/repos/{owner}/{repo}/actions/permissions": { - /** - * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions allowed to run in the repository. - * - * You must authenticate using an access token with the `repo` scope to use this - * endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - get: operations["actions/get-github-actions-permissions-repository"]; - /** - * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions in the repository. - * - * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions, then you cannot override them for the repository. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - put: operations["actions/set-github-actions-permissions-repository"]; - }; - "/repos/{owner}/{repo}/actions/permissions/selected-actions": { - /** - * Gets the settings for selected actions that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." 
- * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - get: operations["actions/get-allowed-actions-repository"]; - /** - * Sets the actions that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." - * - * If the repository belongs to an organization or enterprise that has `selected` actions set at the organization or enterprise levels, then you cannot override any of the allowed actions settings. - * - * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - put: operations["actions/set-allowed-actions-repository"]; - }; - "/repos/{owner}/{repo}/actions/runners": { - /** Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. */ - get: operations["actions/list-self-hosted-runners-for-repo"]; - }; - "/repos/{owner}/{repo}/actions/runners/downloads": { - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. - */ - get: operations["actions/list-runner-applications-for-repo"]; - }; - "/repos/{owner}/{repo}/actions/runners/registration-token": { - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate - * using an access token with the `repo` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN - * ``` - */ - post: operations["actions/create-registration-token-for-repo"]; - }; - "/repos/{owner}/{repo}/actions/runners/remove-token": { - /** - * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. - * You must authenticate using an access token with the `repo` scope to use this endpoint. - * - * #### Example using remove token - * - * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. - * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - post: operations["actions/create-remove-token-for-repo"]; - }; - "/repos/{owner}/{repo}/actions/runners/{runner_id}": { - /** - * Gets a specific self-hosted runner configured in a repository. - * - * You must authenticate using an access token with the `repo` scope to use this - * endpoint. - */ - get: operations["actions/get-self-hosted-runner-for-repo"]; - /** - * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. 
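Building on the registration-token example above, a hedged sketch that requests a token and prints the `./config.sh` command it expects; owner and repo mirror the placeholder from the description.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// POST /repos/{owner}/{repo}/actions/runners/registration-token
// (actions/create-registration-token-for-repo); the token expires after one hour.
async function printRunnerSetupCommand(): Promise<void> {
  const { data } = await octokit.request(
    "POST /repos/{owner}/{repo}/actions/runners/registration-token",
    { owner: "octo-org", repo: "octo-repo-artifacts" }
  );
  console.log(
    `./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token ${data.token}`
  );
}

printRunnerSetupCommand().catch((err) => console.error(err));
```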
- * - * You must authenticate using an access token with the `repo` - * scope to use this endpoint. - */ - delete: operations["actions/delete-self-hosted-runner-from-repo"]; - }; - "/repos/{owner}/{repo}/actions/runs": { - /** - * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - get: operations["actions/list-workflow-runs-for-repo"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}": { - /** Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/get-workflow-run"]; - /** - * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is - * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use - * this endpoint. - */ - delete: operations["actions/delete-workflow-run"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/approvals": { - /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/get-reviews-for-run"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": { - /** Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/list-workflow-run-artifacts"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/cancel": { - /** Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - post: operations["actions/cancel-workflow-run"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/jobs": { - /** Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ - get: operations["actions/list-jobs-for-workflow-run"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/logs": { - /** - * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for - * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use - * this endpoint. If the repository is private you must use an access token with the `repo` scope. 
GitHub Apps must have - * the `actions:read` permission to use this endpoint. - */ - get: operations["actions/download-workflow-run-logs"]; - /** Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - delete: operations["actions/delete-workflow-run-logs"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": { - /** - * Get all deployment environments for a workflow run that are waiting for protection rules to pass. - * - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - get: operations["actions/get-pending-deployments-for-run"]; - /** - * Approve or reject pending deployments that are waiting on approval by a required reviewer. - * - * Anyone with read access to the repository contents and deployments can use this endpoint. - */ - post: operations["actions/review-pending-deployments-for-run"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun": { - /** Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - post: operations["actions/re-run-workflow"]; - }; - "/repos/{owner}/{repo}/actions/runs/{run_id}/timing": { - /** - * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - get: operations["actions/get-workflow-run-usage"]; - }; - "/repos/{owner}/{repo}/actions/secrets": { - /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - get: operations["actions/list-repo-secrets"]; - }; - "/repos/{owner}/{repo}/actions/secrets/public-key": { - /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - get: operations["actions/get-repo-public-key"]; - }; - "/repos/{owner}/{repo}/actions/secrets/{secret_name}": { - /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. 
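For the pending-deployments endpoints above, a hedged sketch that lists the environments waiting on protection rules and approves them in one call; owner, repo, and the comment are placeholders.

```typescript
import * as github from "@actions/github";

const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");

// GET then POST on /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments
// (actions/get-pending-deployments-for-run, actions/review-pending-deployments-for-run).
async function approvePendingDeployments(runId: number): Promise<void> {
  const { data: pending } = await octokit.request(
    "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments",
    { owner: "octo-org", repo: "hello-world", run_id: runId }
  );
  const environmentIds = pending
    .map((p) => p.environment.id)
    .filter((id): id is number => typeof id === "number");
  await octokit.request(
    "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments",
    {
      owner: "octo-org",
      repo: "hello-world",
      run_id: runId,
      environment_ids: environmentIds,
      state: "approved",
      comment: "Approved by release automation.", // placeholder comment
    }
  );
}
```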
GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - get: operations["actions/get-repo-secret"]; - /** - * Creates or updates a repository secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use - * this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. - * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. - * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. - * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - put: operations["actions/create-or-update-repo-secret"]; - /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - delete: operations["actions/delete-repo-secret"]; - }; - "/repos/{owner}/{repo}/actions/workflows": { - /** Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. 
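To complement the encryption examples above, the sketch below shows, under the same assumptions (an authenticated Octokit-style client and placeholder owner/repo values), how the encrypted value could be sent to the create-or-update secret route together with the `key_id` of the public key used for encryption.

```typescript
// Hypothetical sketch: storing an already-encrypted value as a repository
// secret. The encryption itself is done as in the tweetsodium example above.
import { Octokit } from "@octokit/core";

async function putRepoSecret(octokit: Octokit, encryptedValue: string) {
  // Fetch the repository public key; its key_id must accompany the secret.
  const { data: publicKey } = await octokit.request(
    "GET /repos/{owner}/{repo}/actions/secrets/public-key",
    { owner: "octocat", repo: "hello-world" } // placeholder owner/repo
  );

  await octokit.request(
    "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}",
    {
      owner: "octocat",                // placeholder owner
      repo: "hello-world",             // placeholder repository
      secret_name: "MY_SECRET",        // placeholder secret name
      encrypted_value: encryptedValue, // base64 output of sodium.seal(...)
      key_id: publicKey.key_id,        // id of the key the value was sealed with
    }
  );
}
```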
GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/list-repo-workflows"]; - }; - "/repos/{owner}/{repo}/actions/workflows/{workflow_id}": { - /** Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["actions/get-workflow"]; - }; - "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": { - /** - * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - put: operations["actions/disable-workflow"]; - }; - "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": { - /** - * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)." - */ - post: operations["actions/create-workflow-dispatch"]; - }; - "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": { - /** - * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - put: operations["actions/enable-workflow"]; - }; - "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": { - /** - * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. - */ - get: operations["actions/list-workflow-runs"]; - }; - "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": { - /** - * Gets the number of billable minutes used by a specific workflow during the current billing cycle. 
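As an illustration of the `workflow_dispatch` route above, here is a minimal TypeScript sketch; the workflow file name, ref, and inputs are placeholders and must match what the target workflow actually declares.

```typescript
// Hypothetical sketch: manually triggering a workflow run via the
// workflow_dispatch route. The workflow must declare a workflow_dispatch
// trigger, and `inputs` must match the inputs defined in its YAML.
import { Octokit } from "@octokit/core";

async function dispatchWorkflow(octokit: Octokit) {
  await octokit.request(
    "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches",
    {
      owner: "octocat",                // placeholder owner
      repo: "hello-world",             // placeholder repository
      workflow_id: "main.yaml",        // file name accepted in place of the numeric id
      ref: "main",                     // branch or tag to run the workflow on
      inputs: { logLevel: "warning" }, // assumed input declared by the workflow
    }
  );
}
```

A successful dispatch returns no body (204), so the run itself has to be located afterwards via the workflow-runs listing route above.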
Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - get: operations["actions/get-workflow-usage"]; - }; - "/repos/{owner}/{repo}/assignees": { - /** Lists the [available assignees](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. */ - get: operations["issues/list-assignees"]; - }; - "/repos/{owner}/{repo}/assignees/{assignee}": { - /** - * Checks if a user has permission to be assigned to an issue in this repository. - * - * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. - * - * Otherwise a `404` status code is returned. - */ - get: operations["issues/check-user-can-be-assigned"]; - }; - "/repos/{owner}/{repo}/automated-security-fixes": { - /** Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)". */ - put: operations["repos/enable-automated-security-fixes"]; - /** Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)". */ - delete: operations["repos/disable-automated-security-fixes"]; - }; - "/repos/{owner}/{repo}/branches": { - get: operations["repos/list-branches"]; - }; - "/repos/{owner}/{repo}/branches/{branch}": { - get: operations["repos/get-branch"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection": { - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - get: operations["repos/get-branch-protection"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Protecting a branch requires admin or owner permissions to the repository. - * - * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. 
- * - * **Note**: The list of users, apps, and teams in total is limited to 100 items. - */ - put: operations["repos/update-branch-protection"]; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - delete: operations["repos/delete-branch-protection"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": { - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - get: operations["repos/get-admin-branch-protection"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - */ - post: operations["repos/set-admin-branch-protection"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - */ - delete: operations["repos/delete-admin-branch-protection"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": { - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - get: operations["repos/get-pull-request-review-protection"]; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
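A sketch of the update-branch-protection call described above follows; it assumes the same Octokit-style client, and the rule values are illustrative only. All four top-level fields are required by the route, with `null` used to leave a rule unset.

```typescript
// Hypothetical sketch: updating protection rules for a branch. Passing new
// arrays (e.g. contexts) replaces the previous values, as noted above.
import { Octokit } from "@octokit/core";

async function protectBranch(octokit: Octokit) {
  await octokit.request(
    "PUT /repos/{owner}/{repo}/branches/{branch}/protection",
    {
      owner: "octocat",    // placeholder owner
      repo: "hello-world", // placeholder repository
      branch: "main",      // branch to protect
      required_status_checks: { strict: true, contexts: ["ci/build"] },
      enforce_admins: true,
      required_pull_request_reviews: { required_approving_review_count: 1 },
      restrictions: null,  // null = do not restrict who can push
    }
  );
}
```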
*/ - delete: operations["repos/delete-pull-request-review-protection"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - * - * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. - */ - patch: operations["repos/update-pull-request-review-protection"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": { - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://help.github.com/articles/signing-commits-with-gpg) in GitHub Help. - * - * **Note**: You must enable branch protection to require signed commits. - */ - get: operations["repos/get-commit-signature-protection"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. - */ - post: operations["repos/create-commit-signature-protection"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. - */ - delete: operations["repos/delete-commit-signature-protection"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": { - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - get: operations["repos/get-status-checks-protection"]; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - delete: operations["repos/remove-status-check-protection"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. - */ - patch: operations["repos/update-status-check-protection"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": { - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - get: operations["repos/get-all-status-check-contexts"]; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - put: operations["repos/set-status-check-contexts"]; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - post: operations["repos/add-status-check-contexts"]; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - delete: operations["repos/remove-status-check-contexts"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions": { - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists who has access to this protected branch. - * - * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. - */ - get: operations["repos/get-access-restrictions"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Disables the ability to restrict who can push to this branch. - */ - delete: operations["repos/delete-access-restrictions"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": { - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - */ - get: operations["repos/get-apps-with-access-to-protected-branch"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - put: operations["repos/set-app-access-restrictions"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. 
- * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - post: operations["repos/add-app-access-restrictions"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - delete: operations["repos/remove-app-access-restrictions"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": { - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the teams who have push access to this branch. The list includes child teams. - */ - get: operations["repos/get-teams-with-access-to-protected-branch"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. - * - * | Type | Description | - * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | - * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - put: operations["repos/set-team-access-restrictions"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
- * - * Grants the specified teams push access for this branch. You can also give push access to child teams. - * - * | Type | Description | - * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | - * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - post: operations["repos/add-team-access-restrictions"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of a team to push to this branch. You can also remove push access for child teams. - * - * | Type | Description | - * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - delete: operations["repos/remove-team-access-restrictions"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": { - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the people who have push access to this branch. - */ - get: operations["repos/get-users-with-access-to-protected-branch"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. - * - * | Type | Description | - * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - put: operations["repos/set-user-access-restrictions"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified people push access for this branch. 
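For the team restriction routes above, a hedged sketch of granting one team push access is shown below; the request body shape (`teams` as an array of slugs) follows the table above, and the owner, repository, and slug are placeholders.

```typescript
// Hypothetical sketch: granting a team push access to a protected branch.
// Assumed body shape: { teams: ["<team-slug>", ...] }, per the table above.
import { Octokit } from "@octokit/core";

async function grantTeamPush(octokit: Octokit, teamSlug: string) {
  await octokit.request(
    "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
    {
      owner: "octocat",    // placeholder owner (must be an organization)
      repo: "hello-world", // placeholder repository
      branch: "main",      // protected branch
      teams: [teamSlug],   // team slugs; users/apps/teams limited to 100 in total
    }
  );
}
```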
- * - * | Type | Description | - * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - post: operations["repos/add-user-access-restrictions"]; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of a user to push to this branch. - * - * | Type | Description | - * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - delete: operations["repos/remove-user-access-restrictions"]; - }; - "/repos/{owner}/{repo}/branches/{branch}/rename": { - /** - * Renames a branch in a repository. - * - * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". - * - * The permissions required to use this endpoint depends on whether you are renaming the default branch. - * - * To rename a non-default branch: - * - * * Users must have push access. - * * GitHub Apps must have the `contents:write` repository permission. - * - * To rename the default branch: - * - * * Users must have admin or owner permissions. - * * GitHub Apps must have the `administration:write` repository permission. - */ - post: operations["repos/rename-branch"]; - }; - "/repos/{owner}/{repo}/check-runs": { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. - * - * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. - */ - post: operations["checks/create"]; - }; - "/repos/{owner}/{repo}/check-runs/{check_run_id}": { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. 
- */ - get: operations["checks/get"]; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. - */ - patch: operations["checks/update"]; - }; - "/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": { - /** Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. */ - get: operations["checks/list-annotations"]; - }; - "/repos/{owner}/{repo}/check-suites": { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. - */ - post: operations["checks/create-suite"]; - }; - "/repos/{owner}/{repo}/check-suites/preferences": { - /** Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. */ - patch: operations["checks/set-suites-preferences"]; - }; - "/repos/{owner}/{repo}/check-suites/{check_suite_id}": { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. - */ - get: operations["checks/get-suite"]; - }; - "/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. 
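The check-runs route above can be exercised with a sketch like the following; it assumes a GitHub App installation token with the `checks:write` permission, and all names and values are placeholders.

```typescript
// Hypothetical sketch: publishing a completed check run for a commit via the
// /repos/{owner}/{repo}/check-runs route described above.
import { Octokit } from "@octokit/core";

async function createCheckRun(octokit: Octokit, headSha: string) {
  await octokit.request("POST /repos/{owner}/{repo}/check-runs", {
    owner: "octocat",     // placeholder owner
    repo: "hello-world",  // placeholder repository
    name: "example-lint", // display name of the check
    head_sha: headSha,    // commit the check run is attached to
    status: "completed",
    conclusion: "success",
    output: {
      title: "Lint results",
      summary: "No problems found.", // title and summary are both required in output
    },
  });
}
```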
- */ - get: operations["checks/list-for-suite"]; - }; - "/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": { - /** - * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. - * - * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. - */ - post: operations["checks/rerequest-suite"]; - }; - "/repos/{owner}/{repo}/code-scanning/alerts": { - /** - * Lists all open code scanning alerts for the default branch (usually `main` - * or `master`). You must use an access token with the `security_events` scope to use - * this endpoint. GitHub Apps must have the `security_events` read permission to use - * this endpoint. - * - * The response includes a `most_recent_instance` object. - * This provides details of the most recent instance of this alert - * for the default branch or for the specified Git reference - * (if you used `ref` in the request). - */ - get: operations["code-scanning/list-alerts-for-repo"]; - }; - "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": { - /** - * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * **Deprecation notice**: - * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. - */ - get: operations["code-scanning/get-alert"]; - /** Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. */ - patch: operations["code-scanning/update-alert"]; - }; - "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": { - /** Lists all instances of the specified code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. */ - get: operations["code-scanning/list-alerts-instances"]; - }; - "/repos/{owner}/{repo}/code-scanning/analyses": { - /** - * Lists the details of all code scanning analyses for a repository, - * starting with the most recent. - * The response is paginated and you can use the `page` and `per_page` parameters - * to list the analyses you're interested in. - * By default 30 analyses are listed per page. - * - * The `rules_count` field in the response give the number of rules - * that were run in the analysis. - * For very old analyses this data is not available, - * and `0` is returned in this field. - * - * You must use an access token with the `security_events` scope to use this endpoint. - * GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * **Deprecation notice**: - * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. 
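To illustrate the alert update route above, here is a minimal sketch that dismisses a single alert; it assumes the `security_events` write permission and uses placeholder owner/repo values.

```typescript
// Hypothetical sketch: dismissing a code scanning alert by number.
import { Octokit } from "@octokit/core";

async function dismissAlert(octokit: Octokit, alertNumber: number) {
  await octokit.request(
    "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}",
    {
      owner: "octocat",          // placeholder owner
      repo: "hello-world",       // placeholder repository
      alert_number: alertNumber, // number shown at the end of the alert URL
      state: "dismissed",
      dismissed_reason: "false positive", // one of the documented dismissal reasons
    }
  );
}
```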
- */ - get: operations["code-scanning/list-recent-analyses"]; - }; - "/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": { - /** - * Gets a specified code scanning analysis for a repository. - * You must use an access token with the `security_events` scope to use this endpoint. - * GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * The default JSON response contains fields that describe the analysis. - * This includes the Git reference and commit SHA to which the analysis relates, - * the datetime of the analysis, the name of the code scanning tool, - * and the number of alerts. - * - * The `rules_count` field in the default response give the number of rules - * that were run in the analysis. - * For very old analyses this data is not available, - * and `0` is returned in this field. - * - * If you use the Accept header `application/sarif+json`, - * the response contains the analysis data that was uploaded. - * This is formatted as - * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). - * For an example response, see "[Custom media type for code scanning](#custom-media-type-for-code-scanning)." - * - * **Deprecation notice**: - * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. - */ - get: operations["code-scanning/get-analysis"]; - /** - * Deletes a specified code scanning analysis from a repository. For - * private repositories, you must use an access token with the `repo` scope. For public repositories, - * you must use an access token with `public_repo` and `repo:security_events` scopes. - * GitHub Apps must have the `security_events` write permission to use this endpoint. - * - * You can delete one analysis at a time. - * To delete a series of analyses, start with the most recent analysis and work backwards. - * Conceptually, the process is similar to the undo function in a text editor. - * - * When you list the analyses for a repository, - * one or more will be identified as deletable in the response: - * - * ``` - * "deletable": true - * ``` - * - * An analysis is deletable when it's the most recent in a set of analyses. - * Typically, a repository will have multiple sets of analyses - * for each enabled code scanning tool, - * where a set is determined by a unique combination of analysis values: - * - * * `ref` - * * `tool` - * * `analysis_key` - * * `environment` - * - * If you attempt to delete an analysis that is not the most recent in a set, - * you'll get a 400 response with the message: - * - * ``` - * Analysis specified is not deletable. - * ``` - * - * The response from a successful `DELETE` operation provides you with - * two alternative URLs for deleting the next analysis in the set - * (see the example default response below). - * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis - * in the set. This is a useful option if you want to preserve at least one analysis - * for the specified tool in your repository. - * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. - * When you delete the last analysis in a set the value of `next_analysis_url` and `confirm_delete_url` - * in the 200 response is `null`. 
- * - * As an example of the deletion process, - * let's imagine that you added a workflow that configured a particular code scanning tool - * to analyze the code in a repository. This tool has added 15 analyses: - * 10 on the default branch, and another 5 on a topic branch. - * You therefore have two separate sets of analyses for this tool. - * You've now decided that you want to remove all of the analyses for the tool. - * To do this you must make 15 separate deletion requests. - * To start, you must find the deletable analysis for one of the sets, - * step through deleting the analyses in that set, - * and then repeat the process for the second set. - * The procedure therefore consists of a nested loop: - * - * **Outer loop**: - * * List the analyses for the repository, filtered by tool. - * * Parse this list to find a deletable analysis. If found: - * - * **Inner loop**: - * * Delete the identified analysis. - * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. - * - * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. - */ - delete: operations["code-scanning/delete-analysis"]; - }; - "/repos/{owner}/{repo}/code-scanning/sarifs": { - /** - * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. - * - * There are two places where you can upload code scanning results. - * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/github/finding-security-vulnerabilities-and-errors-in-your-code/triaging-code-scanning-alerts-in-pull-requests)." - * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/github/finding-security-vulnerabilities-and-errors-in-your-code/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." - * - * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: - * - * ``` - * gzip -c analysis-data.sarif | base64 -w0 - * ``` - * - * SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. - * - * The `202 Accepted`, response includes an `id` value. - * You can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint. 
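The nested deletion loop described above might look roughly like the sketch below. It is only a sketch: it assumes the analysis listing fits on one page, that the client accepts a "METHOD <full URL>" route string when following `confirm_delete_url`, and it uses placeholder owner/repo values.

```typescript
// Hypothetical sketch of the deletion procedure: find a deletable analysis
// for one tool, delete it, then keep following confirm_delete_url until the
// last analysis in the set has been removed.
import { Octokit } from "@octokit/core";

async function deleteToolAnalyses(octokit: Octokit, toolName: string) {
  const owner = "octocat";    // placeholder owner
  const repo = "hello-world"; // placeholder repository

  const { data: analyses } = await octokit.request(
    "GET /repos/{owner}/{repo}/code-scanning/analyses",
    { owner, repo, tool_name: toolName }
  );

  const start = analyses.find((analysis) => analysis.deletable);
  if (!start) return; // nothing deletable for this tool

  // confirm_delete lets this also work when the set holds a single analysis.
  let { data } = await octokit.request(
    "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}",
    { owner, repo, analysis_id: start.id, confirm_delete: "true" }
  );

  while (data.confirm_delete_url) {
    // Assumption: octokit.request also accepts "METHOD <full URL>" routes.
    ({ data } = await octokit.request(`DELETE ${data.confirm_delete_url}`));
  }
}
```

Following `next_analysis_url` instead of `confirm_delete_url` would stop short of the final analysis in each set, as the text above explains.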
- * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." - */ - post: operations["code-scanning/upload-sarif"]; - }; - "/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": { - /** Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. */ - get: operations["code-scanning/get-sarif"]; - }; - "/repos/{owner}/{repo}/collaborators": { - /** - * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. - * - * Team members will include the members of child teams. - */ - get: operations["repos/list-collaborators"]; - }; - "/repos/{owner}/{repo}/collaborators/{username}": { - /** - * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. - * - * Team members will include the members of child teams. - */ - get: operations["repos/check-collaborator"]; - /** - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * For more information the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). - * - * **Rate limits** - * - * To prevent abuse, you are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. 
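A Node/TypeScript equivalent of the `gzip -c ... | base64 -w0` step above, combined with the upload call, might look like the following sketch; the file name, owner, repository, and ref are placeholders.

```typescript
// Hypothetical sketch: compress and Base64-encode a SARIF file, then upload
// it via the code-scanning/sarifs route above.
import { readFileSync } from "fs";
import { gzipSync } from "zlib";
import { Octokit } from "@octokit/core";

async function uploadSarif(octokit: Octokit, commitSha: string) {
  const sarif = gzipSync(readFileSync("analysis-data.sarif")).toString("base64");

  const { data } = await octokit.request(
    "POST /repos/{owner}/{repo}/code-scanning/sarifs",
    {
      owner: "octocat",            // placeholder owner
      repo: "hello-world",         // placeholder repository
      commit_sha: commitSha,       // commit the analysis applies to
      ref: "refs/heads/my-branch", // or refs/pull/<number>/head for a PR check
      sarif,                       // gzip-compressed, Base64-encoded SARIF payload
    }
  );

  // The 202 response includes an id that can be polled via the
  // /code-scanning/sarifs/{sarif_id} route described next.
  return data.id;
}
```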
- */ - put: operations["repos/add-collaborator"]; - delete: operations["repos/remove-collaborator"]; - }; - "/repos/{owner}/{repo}/collaborators/{username}/permission": { - /** Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. */ - get: operations["repos/get-collaborator-permission-level"]; - }; - "/repos/{owner}/{repo}/comments": { - /** - * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). - * - * Comments are ordered by ascending ID. - */ - get: operations["repos/list-commit-comments-for-repo"]; - }; - "/repos/{owner}/{repo}/comments/{comment_id}": { - get: operations["repos/get-commit-comment"]; - delete: operations["repos/delete-commit-comment"]; - patch: operations["repos/update-commit-comment"]; - }; - "/repos/{owner}/{repo}/comments/{comment_id}/reactions": { - /** List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). */ - get: operations["reactions/list-for-commit-comment"]; - /** Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this commit comment. */ - post: operations["reactions/create-for-commit-comment"]; - }; - "/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": { - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. - * - * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). - */ - delete: operations["reactions/delete-for-commit-comment"]; - }; - "/repos/{owner}/{repo}/commits": { - /** - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. 
| - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - get: operations["repos/list-commits"]; - }; - "/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": { - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. - */ - get: operations["repos/list-branches-for-head-commit"]; - }; - "/repos/{owner}/{repo}/commits/{commit_sha}/comments": { - /** Use the `:commit_sha` to specify the commit that will have its comments listed. */ - get: operations["repos/list-comments-for-commit"]; - /** - * Create a comment for a commit using its `:commit_sha`. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - post: operations["repos/create-commit-comment"]; - }; - "/repos/{owner}/{repo}/commits/{commit_sha}/pulls": { - /** Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. Additional preview headers may be required to see certain details for associated pull requests, such as whether a pull request is in a draft state. For more information about previews that might affect this endpoint, see the [List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests) endpoint. */ - get: operations["repos/list-pull-requests-associated-with-commit"]; - }; - "/repos/{owner}/{repo}/commits/{ref}": { - /** - * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. - * - * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. - * - * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. 
- * - * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - get: operations["repos/get-commit"]; - }; - "/repos/{owner}/{repo}/commits/{ref}/check-runs": { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. - */ - get: operations["checks/list-for-ref"]; - }; - "/repos/{owner}/{repo}/commits/{ref}/check-suites": { - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. 
Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. - */ - get: operations["checks/list-suites-for-ref"]; - }; - "/repos/{owner}/{repo}/commits/{ref}/status": { - /** - * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. - * - * The most recent status for each context is returned, up to 100. This field [paginates](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination) if there are over 100 contexts. - * - * Additionally, a combined `state` is returned. The `state` is one of: - * - * * **failure** if any of the contexts report as `error` or `failure` - * * **pending** if there are no statuses or a context is `pending` - * * **success** if the latest status for all contexts is `success` - */ - get: operations["repos/get-combined-status-for-ref"]; - }; - "/repos/{owner}/{repo}/commits/{ref}/statuses": { - /** - * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. - * - * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. - */ - get: operations["repos/list-commit-statuses-for-ref"]; - }; - "/repos/{owner}/{repo}/community/code_of_conduct": { - /** - * Returns the contents of the repository's code of conduct file, if one is detected. - * - * A code of conduct is detected if there is a file named `CODE_OF_CONDUCT` in the root directory of the repository. GitHub detects which code of conduct it is using fuzzy matching. - */ - get: operations["codes-of-conduct/get-for-repo"]; - }; - "/repos/{owner}/{repo}/community/profile": { - /** - * This endpoint will return all community profile metrics, including an - * overall health score, repository description, the presence of documentation, detected - * code of conduct, detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, - * README, and CONTRIBUTING files. - * - * The `health_percentage` score is defined as a percentage of how many of - * these four documents are present: README, CONTRIBUTING, LICENSE, and - * CODE_OF_CONDUCT. For example, if all four documents are present, then - * the `health_percentage` is `100`. If only one is present, then the - * `health_percentage` is `25`. - * - * `content_reports_enabled` is only returned for organization-owned repositories. - */ - get: operations["repos/get-community-profile-metrics"]; - }; - "/repos/{owner}/{repo}/contents/{path}": { - /** - * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit - * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. - * - * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for - * retrieving the raw content or rendered HTML (when supported). 
All content types support [a custom media - * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent - * object format. - * - * **Note**: - * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). - * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees - * API](https://docs.github.com/rest/reference/git#get-a-tree). - * * This API supports files up to 1 megabyte in size. - * - * #### If the content is a directory - * The response will be an array of objects, one object for each item in the directory. - * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value - * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). - * In the next major version of the API, the type will be returned as "submodule". - * - * #### If the content is a symlink - * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the - * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object - * describing the symlink itself. - * - * #### If the content is a submodule - * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific - * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out - * the submodule at that specific commit. - * - * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the - * github.com URLs (`html_url` and `_links["html"]`) will have null values. - */ - get: operations["repos/get-content"]; - /** Creates a new file or replaces an existing file in a repository. */ - put: operations["repos/create-or-update-file-contents"]; - /** - * Deletes a file in a repository. - * - * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. - * - * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. - * - * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. - */ - delete: operations["repos/delete-file"]; - }; - "/repos/{owner}/{repo}/contributors": { - /** - * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. - * - * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. 
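The contents endpoints described above are usually driven through an Octokit client rather than raw HTTP. Below is a minimal, hedged sketch of the create-or-replace flow: the token is assumed to be in `GITHUB_TOKEN`, the `owner`/`repo`/`path` values are placeholders, and depending on the installed `@actions/github` / `@octokit/plugin-rest-endpoint-methods` version the methods are exposed as `octokit.rest.repos.*` or directly as `octokit.repos.*` (the `.rest` form is used here).

```typescript
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN!);

// Create the file if it is missing, otherwise replace it (replacing requires
// the current blob SHA of the existing file).
async function upsertFile(owner: string, repo: string, path: string, text: string): Promise<void> {
  let sha: string | undefined;
  try {
    const { data } = await octokit.rest.repos.getContent({ owner, repo, path });
    if (!Array.isArray(data) && data.type === "file") {
      sha = data.sha; // existing file: pass its SHA so the call is treated as an update
    }
  } catch {
    // 404: the file does not exist yet, so create it without a SHA.
  }

  await octokit.rest.repos.createOrUpdateFileContents({
    owner,
    repo,
    path,
    message: `chore: update ${path}`,
    content: Buffer.from(text, "utf8").toString("base64"), // contents must be Base64 encoded
    sha,
  });
}
```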
- */ - get: operations["repos/list-contributors"]; - }; - "/repos/{owner}/{repo}/deployments": { - /** Simple filtering of deployments is available via query parameters: */ - get: operations["repos/list-deployments"]; - /** - * Deployments offer a few configurable parameters with certain defaults. - * - * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them - * before we merge a pull request. - * - * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have - * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter - * makes it easier to track which environments have requested deployments. The default environment is `production`. - * - * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If - * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, - * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will - * return a failure response. - * - * By default, [commit statuses](https://docs.github.com/rest/reference/repos#statuses) for every submitted context must be in a `success` - * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to - * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do - * not require any contexts or create any commit statuses, the deployment will always succeed. - * - * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text - * field that will be passed on when a deployment event is dispatched. - * - * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might - * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an - * application with debugging enabled. - * - * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. - * - * #### Merged branch response - * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating - * a deployment. This auto-merge happens when: - * * Auto-merge option is enabled in the repository - * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example - * * There are no merge conflicts - * - * If there are no new commits in the base branch, a new request to create a deployment should give a successful - * response. - * - * #### Merge conflict response - * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't - * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. - * - * #### Failed commit status checks - * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` - * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. 
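A hedged sketch of the deployment parameters just described (`ref`, `environment`, `auto_merge`, `required_contexts`, `payload`, `task`), reusing an `octokit` client created as in the earlier sketch; the owner, repository, and branch names are placeholders.

```typescript
import { getOctokit } from "@actions/github";

declare const octokit: ReturnType<typeof getOctokit>; // client created as in the earlier sketch

async function deployStaging(): Promise<void> {
  await octokit.rest.repos.createDeployment({
    owner: "my-org",           // placeholder
    repo: "my-service",        // placeholder
    ref: "topic-branch",       // any named branch, tag, or SHA
    environment: "staging",    // defaults to "production" when omitted
    auto_merge: false,         // skip the default-branch merge attempt
    required_contexts: [],     // empty array: do not gate on commit statuses
    payload: { run_id: 42 },   // arbitrary JSON handed on to deployment events
    task: "deploy",            // or e.g. "deploy:migrations"
  });
}
```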
- */ - post: operations["repos/create-deployment"]; - }; - "/repos/{owner}/{repo}/deployments/{deployment_id}": { - get: operations["repos/get-deployment"]; - /** - * To ensure there can always be an active deployment, you can only delete an _inactive_ deployment. Anyone with `repo` or `repo_deployment` scopes can delete an inactive deployment. - * - * To set a deployment as inactive, you must: - * - * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. - * * Mark the active deployment as inactive by adding any non-successful deployment status. - * - * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." - */ - delete: operations["repos/delete-deployment"]; - }; - "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses": { - /** Users with pull access can view deployment statuses for a deployment: */ - get: operations["repos/list-deployment-statuses"]; - /** - * Users with `push` access can create deployment statuses for a given deployment. - * - * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. - */ - post: operations["repos/create-deployment-status"]; - }; - "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": { - /** Users with pull access can view a deployment status for a deployment: */ - get: operations["repos/get-deployment-status"]; - }; - "/repos/{owner}/{repo}/dispatches": { - /** - * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." - * - * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. - * - * This endpoint requires write access to the repository by providing either: - * - * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. - * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. - * - * This input example shows how you can use the `client_payload` as a test to debug your workflow. - */ - post: operations["repos/create-dispatch-event"]; - }; - "/repos/{owner}/{repo}/environments": { - /** - * Get all environments for a repository. - * - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
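The `repository_dispatch` description above maps onto a single call. A sketch follows, with a hypothetical `event_type`, a made-up `client_payload`, and placeholder owner/repo values; the receiving workflow is assumed to opt in via its `types` filter.

```typescript
import { getOctokit } from "@actions/github";

declare const octokit: ReturnType<typeof getOctokit>; // client created as in the earlier sketch

// Fires a `repository_dispatch` event; a workflow declaring
// `on: repository_dispatch: { types: [run-e2e] }` (hypothetical) would receive it.
async function triggerE2E(sha: string): Promise<void> {
  await octokit.rest.repos.createDispatchEvent({
    owner: "my-org",        // placeholder
    repo: "my-service",     // placeholder
    event_type: "run-e2e",  // must match the workflow's `types` filter
    client_payload: { sha, requested_by: "nightly-job" }, // arbitrary JSON for the workflow
  });
}
```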
- */ - get: operations["repos/get-all-environments"]; - }; - "/repos/{owner}/{repo}/environments/{environment_name}": { - /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - get: operations["repos/get-environment"]; - /** - * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." - * - * **Note:** Although you can use this operation to specify that only branches that match specified name patterns can deploy to this environment, you must use the UI to set the name patterns. For more information, see "[Environments](/actions/reference/environments#deployment-branches)." - * - * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." - * - * You must authenticate using an access token with the repo scope to use this endpoint. - */ - put: operations["repos/create-or-update-environment"]; - /** You must authenticate using an access token with the repo scope to use this endpoint. */ - delete: operations["repos/delete-an-environment"]; - }; - "/repos/{owner}/{repo}/events": { - get: operations["activity/list-repo-events"]; - }; - "/repos/{owner}/{repo}/forks": { - get: operations["repos/list-forks"]; - /** - * Create a fork for the authenticated user. - * - * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com). - */ - post: operations["repos/create-fork"]; - }; - "/repos/{owner}/{repo}/git/blobs": { - post: operations["git/create-blob"]; - }; - "/repos/{owner}/{repo}/git/blobs/{file_sha}": { - /** - * The `content` in the response will always be Base64 encoded. - * - * _Note_: This API supports blobs up to 100 megabytes in size. - */ - get: operations["git/get-blob"]; - }; - "/repos/{owner}/{repo}/git/commits": { - /** - * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. 
| - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - post: operations["git/create-commit"]; - }; - "/repos/{owner}/{repo}/git/commits/{commit_sha}": { - /** - * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. 
| - */ - get: operations["git/get-commit"]; - }; - "/repos/{owner}/{repo}/git/matching-refs/{ref}": { - /** - * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. - * - * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. - * - * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - * - * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. - */ - get: operations["git/list-matching-refs"]; - }; - "/repos/{owner}/{repo}/git/ref/{ref}": { - /** - * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. - * - * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - */ - get: operations["git/get-ref"]; - }; - "/repos/{owner}/{repo}/git/refs": { - /** Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. */ - post: operations["git/create-ref"]; - }; - "/repos/{owner}/{repo}/git/refs/{ref}": { - delete: operations["git/delete-ref"]; - patch: operations["git/update-ref"]; - }; - "/repos/{owner}/{repo}/git/tags": { - /** - * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. 
| - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - post: operations["git/create-tag"]; - }; - "/repos/{owner}/{repo}/git/tags/{tag_sha}": { - /** - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. 
| - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - get: operations["git/get-tag"]; - }; - "/repos/{owner}/{repo}/git/trees": { - /** - * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. - * - * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." - */ - post: operations["git/create-tree"]; - }; - "/repos/{owner}/{repo}/git/trees/{tree_sha}": { - /** - * Returns a single tree using the SHA1 value for that tree. - * - * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. - */ - get: operations["git/get-tree"]; - }; - "/repos/{owner}/{repo}/hooks": { - get: operations["repos/list-webhooks"]; - /** - * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can - * share the same `config` as long as those webhooks do not have any `events` that overlap. - */ - post: operations["repos/create-webhook"]; - }; - "/repos/{owner}/{repo}/hooks/{hook_id}": { - /** Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." */ - get: operations["repos/get-webhook"]; - delete: operations["repos/delete-webhook"]; - /** Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." */ - patch: operations["repos/update-webhook"]; - }; - "/repos/{owner}/{repo}/hooks/{hook_id}/config": { - /** - * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." - * - * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. - */ - get: operations["repos/get-webhook-config-for-repo"]; - /** - * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." - * - * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. 
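A sketch of registering a webhook with its own `config`, as the create-webhook note above requires; the receiver URL, the secret environment variable, and the event list are assumptions made for illustration only.

```typescript
import { getOctokit } from "@actions/github";

declare const octokit: ReturnType<typeof getOctokit>; // client created as in the earlier sketch

async function addDeliveryHook(): Promise<void> {
  await octokit.rest.repos.createWebhook({
    owner: "my-org",                             // placeholder
    repo: "my-service",                          // placeholder
    events: ["push", "pull_request"],            // events this hook subscribes to
    config: {
      url: "https://example.com/github/events",  // placeholder receiver endpoint
      content_type: "json",
      secret: process.env.WEBHOOK_SECRET,        // assumed env var; used to sign deliveries
    },
  });
}
```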
- */ - patch: operations["repos/update-webhook-config-for-repo"]; - }; - "/repos/{owner}/{repo}/hooks/{hook_id}/pings": { - /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ - post: operations["repos/ping-webhook"]; - }; - "/repos/{owner}/{repo}/hooks/{hook_id}/tests": { - /** - * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. - * - * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` - */ - post: operations["repos/test-push-webhook"]; - }; - "/repos/{owner}/{repo}/import": { - /** - * View the progress of an import. - * - * **Import status** - * - * This section includes details about the possible values of the `status` field of the Import Progress response. - * - * An import that does not have errors will progress through these steps: - * - * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. - * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). - * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. - * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". - * * `complete` - the import is complete, and the repository is ready on GitHub. - * - * If there are problems, you will see one of these in the `status` field: - * - * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com) for more information. - * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. - * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. 
To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * - * **The project_choices field** - * - * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. - * - * **Git LFS related fields** - * - * This section includes details about Git LFS related fields that may be present in the Import Progress response. - * - * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. - * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. - * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. - * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. - */ - get: operations["migrations/get-import-status"]; - /** Start a source import to a GitHub repository using GitHub Importer. */ - put: operations["migrations/start-import"]; - /** Stop an import for a repository. */ - delete: operations["migrations/cancel-import"]; - /** - * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API - * request. If no parameters are provided, the import will be restarted. - */ - patch: operations["migrations/update-import"]; - }; - "/repos/{owner}/{repo}/import/authors": { - /** - * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. - * - * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. - */ - get: operations["migrations/get-commit-authors"]; - }; - "/repos/{owner}/{repo}/import/authors/{author_id}": { - /** Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. */ - patch: operations["migrations/map-commit-author"]; - }; - "/repos/{owner}/{repo}/import/large_files": { - /** List files larger than 100MB found during the import */ - get: operations["migrations/get-large-files"]; - }; - "/repos/{owner}/{repo}/import/lfs": { - /** You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://help.github.com/articles/versioning-large-files/). */ - patch: operations["migrations/set-lfs-preference"]; - }; - "/repos/{owner}/{repo}/installation": { - /** - * Enables an authenticated GitHub App to find the repository's installation information. 
The installation's account type will be either an organization or a user account, depending which account the repository belongs to. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - get: operations["apps/get-repo-installation"]; - }; - "/repos/{owner}/{repo}/interaction-limits": { - /** Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. */ - get: operations["interactions/get-restrictions-for-repo"]; - /** Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ - put: operations["interactions/set-restrictions-for-repo"]; - /** Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ - delete: operations["interactions/remove-restrictions-for-repo"]; - }; - "/repos/{owner}/{repo}/invitations": { - /** When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. */ - get: operations["repos/list-invitations"]; - }; - "/repos/{owner}/{repo}/invitations/{invitation_id}": { - delete: operations["repos/delete-invitation"]; - patch: operations["repos/update-invitation"]; - }; - "/repos/{owner}/{repo}/issues": { - /** - * List issues in a repository. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - get: operations["issues/list-for-repo"]; - /** - * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://help.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. - */ - post: operations["issues/create"]; - }; - "/repos/{owner}/{repo}/issues/comments": { - /** By default, Issue Comments are ordered by ascending ID. 
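Because the issues endpoints above return pull requests as well as issues, callers usually split the result on the `pull_request` key; a small sketch with placeholder owner/repo values.

```typescript
import { getOctokit } from "@actions/github";

declare const octokit: ReturnType<typeof getOctokit>; // client created as in the earlier sketch

// The "Issues" endpoints also return pull requests; `pull_request` is only set on PRs.
async function splitIssuesAndPulls() {
  const { data } = await octokit.rest.issues.listForRepo({
    owner: "my-org",      // placeholder
    repo: "my-service",   // placeholder
    state: "open",
    per_page: 100,
  });
  return {
    issues: data.filter((item) => item.pull_request === undefined),
    pullRequests: data.filter((item) => item.pull_request !== undefined),
  };
}
```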
*/ - get: operations["issues/list-comments-for-repo"]; - }; - "/repos/{owner}/{repo}/issues/comments/{comment_id}": { - get: operations["issues/get-comment"]; - delete: operations["issues/delete-comment"]; - patch: operations["issues/update-comment"]; - }; - "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": { - /** List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). */ - get: operations["reactions/list-for-issue-comment"]; - /** Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this issue comment. */ - post: operations["reactions/create-for-issue-comment"]; - }; - "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": { - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. - * - * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). - */ - delete: operations["reactions/delete-for-issue-comment"]; - }; - "/repos/{owner}/{repo}/issues/events": { - get: operations["issues/list-events-for-repo"]; - }; - "/repos/{owner}/{repo}/issues/events/{event_id}": { - get: operations["issues/get-event"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}": { - /** - * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was - * [transferred](https://help.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If - * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API - * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read - * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe - * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - get: operations["issues/get"]; - /** Issue owners and users with push access can edit an issue. */ - patch: operations["issues/update"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/assignees": { - /** Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. */ - post: operations["issues/add-assignees"]; - /** Removes one or more assignees from an issue. */ - delete: operations["issues/remove-assignees"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/comments": { - /** Issue Comments are ordered by ascending ID. */ - get: operations["issues/list-comments"]; - /** This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). 
Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. */ - post: operations["issues/create-comment"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/events": { - get: operations["issues/list-events"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/labels": { - get: operations["issues/list-labels-on-issue"]; - /** Removes any previous labels and sets the new labels for an issue. */ - put: operations["issues/set-labels"]; - post: operations["issues/add-labels"]; - delete: operations["issues/remove-all-labels"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": { - /** Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. */ - delete: operations["issues/remove-label"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/lock": { - /** - * Users with push access can lock an issue or pull request's conversation. - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - put: operations["issues/lock"]; - /** Users with push access can unlock an issue's conversation. */ - delete: operations["issues/unlock"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/reactions": { - /** List the reactions to an [issue](https://docs.github.com/rest/reference/issues). */ - get: operations["reactions/list-for-issue"]; - /** Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with a `Status: 200 OK` means that you already added the reaction type to this issue. */ - post: operations["reactions/create-for-issue"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": { - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. - * - * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). - */ - delete: operations["reactions/delete-for-issue"]; - }; - "/repos/{owner}/{repo}/issues/{issue_number}/timeline": { - get: operations["issues/list-events-for-timeline"]; - }; - "/repos/{owner}/{repo}/keys": { - get: operations["repos/list-deploy-keys"]; - /** You can create a read-only deploy key. */ - post: operations["repos/create-deploy-key"]; - }; - "/repos/{owner}/{repo}/keys/{key_id}": { - get: operations["repos/get-deploy-key"]; - /** Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. */ - delete: operations["repos/delete-deploy-key"]; - }; - "/repos/{owner}/{repo}/labels": { - get: operations["issues/list-labels-for-repo"]; - post: operations["issues/create-label"]; - }; - "/repos/{owner}/{repo}/labels/{name}": { - get: operations["issues/get-label"]; - delete: operations["issues/delete-label"]; - patch: operations["issues/update-label"]; - }; - "/repos/{owner}/{repo}/languages": { - /** Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. 
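Since the languages payload documented above maps each language to a byte count, per-language shares fall out of a simple reduction; a sketch with placeholder owner/repo values.

```typescript
import { getOctokit } from "@actions/github";

declare const octokit: ReturnType<typeof getOctokit>; // client created as in the earlier sketch

// The payload maps language name -> bytes of code, so shares are a simple reduction.
async function printLanguageShares(): Promise<void> {
  const { data: languages } = await octokit.rest.repos.listLanguages({
    owner: "my-org",      // placeholder
    repo: "my-service",   // placeholder
  });
  const total = Object.values(languages).reduce((sum, bytes) => sum + bytes, 0);
  for (const [name, bytes] of Object.entries(languages)) {
    console.log(`${name}: ${bytes} bytes (${((bytes / (total || 1)) * 100).toFixed(1)}%)`);
  }
}
```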
*/ - get: operations["repos/list-languages"]; - }; - "/repos/{owner}/{repo}/license": { - /** - * This method returns the contents of the repository's license file, if one is detected. - * - * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. - */ - get: operations["licenses/get-for-repo"]; - }; - "/repos/{owner}/{repo}/merges": { - post: operations["repos/merge"]; - }; - "/repos/{owner}/{repo}/milestones": { - get: operations["issues/list-milestones"]; - post: operations["issues/create-milestone"]; - }; - "/repos/{owner}/{repo}/milestones/{milestone_number}": { - get: operations["issues/get-milestone"]; - delete: operations["issues/delete-milestone"]; - patch: operations["issues/update-milestone"]; - }; - "/repos/{owner}/{repo}/milestones/{milestone_number}/labels": { - get: operations["issues/list-labels-for-milestone"]; - }; - "/repos/{owner}/{repo}/notifications": { - /** List all notifications for the current user. */ - get: operations["activity/list-repo-notifications-for-authenticated-user"]; - /** Marks all notifications in a repository as "read" removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ - put: operations["activity/mark-repo-notifications-as-read"]; - }; - "/repos/{owner}/{repo}/pages": { - get: operations["repos/get-pages"]; - /** Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). */ - put: operations["repos/update-information-about-pages-site"]; - /** Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." */ - post: operations["repos/create-pages-site"]; - delete: operations["repos/delete-pages-site"]; - }; - "/repos/{owner}/{repo}/pages/builds": { - get: operations["repos/list-pages-builds"]; - /** - * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. - * - * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. - */ - post: operations["repos/request-pages-build"]; - }; - "/repos/{owner}/{repo}/pages/builds/latest": { - get: operations["repos/get-latest-pages-build"]; - }; - "/repos/{owner}/{repo}/pages/builds/{build_id}": { - get: operations["repos/get-pages-build"]; - }; - "/repos/{owner}/{repo}/pages/health": { - /** - * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. 
- * - * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. - * - * Users must have admin or owner permissions. GitHub Apps must have the `pages:write` and `administration:write` permission to use this endpoint. - */ - get: operations["repos/get-pages-health-check"]; - }; - "/repos/{owner}/{repo}/projects": { - /** Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - get: operations["projects/list-for-repo"]; - /** Creates a repository project board. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - post: operations["projects/create-for-repo"]; - }; - "/repos/{owner}/{repo}/pulls": { - /** Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - get: operations["pulls/list"]; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. - * - * You can create a new pull request. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - post: operations["pulls/create"]; - }; - "/repos/{owner}/{repo}/pulls/comments": { - /** Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. */ - get: operations["pulls/list-review-comments-for-repo"]; - }; - "/repos/{owner}/{repo}/pulls/comments/{comment_id}": { - /** Provides details for a review comment. */ - get: operations["pulls/get-review-comment"]; - /** Deletes a review comment. */ - delete: operations["pulls/delete-review-comment"]; - /** Enables you to edit a review comment. 
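A sketch of opening a pull request as described above; the branch names, title, and body are placeholders, and draft availability depends on the plan notes quoted earlier.

```typescript
import { getOctokit } from "@actions/github";

declare const octokit: ReturnType<typeof getOctokit>; // client created as in the earlier sketch

async function openUpdatePullRequest(): Promise<void> {
  const { data: pr } = await octokit.rest.pulls.create({
    owner: "my-org",              // placeholder
    repo: "my-service",           // placeholder
    title: "chore: automated update",
    head: "topic-branch",         // branch with the changes ("user:branch" for forks)
    base: "main",                 // placeholder default branch to merge into
    body: "Automated update.",
    draft: false,
  });
  console.log(`Opened #${pr.number}: ${pr.html_url}`);
}
```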
*/ - patch: operations["pulls/update-review-comment"]; - }; - "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": { - /** List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). */ - get: operations["reactions/list-for-pull-request-review-comment"]; - /** Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this pull request review comment. */ - post: operations["reactions/create-for-pull-request-review-comment"]; - }; - "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": { - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` - * - * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). - */ - delete: operations["reactions/delete-for-pull-request-comment"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}": { - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists details of a pull request by providing its number. - * - * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - * - * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. - * - * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: - * - * * If merged as a [merge commit](https://help.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. - * * If merged via a [squash](https://help.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. 
- * * If [rebased](https://help.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. - * - * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. - */ - get: operations["pulls/get"]; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. - */ - patch: operations["pulls/update"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/comments": { - /** Lists all review comments for a pull request. By default, review comments are in ascending order by ID. */ - get: operations["pulls/list-review-comments"]; - /** - * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. - * - * You can still create a review comment using the `position` parameter. When you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. For more information, see the [`comfort-fade` preview notice](https://docs.github.com/rest/reference/pulls#create-a-review-comment-for-a-pull-request-preview-notices). - * - * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - post: operations["pulls/create-review-comment"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": { - /** - * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). 
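A rough sketch of the `mergeable` polling described above (not part of the patch), assuming an `@octokit/core` client; the retry interval and attempt count are placeholders.

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Re-fetch the pull request until GitHub's background job has computed `mergeable`.
async function waitForMergeability(owner: string, repo: string, pull_number: number) {
  for (let attempt = 0; attempt < 10; attempt++) {
    const { data: pull } = await octokit.request(
      "GET /repos/{owner}/{repo}/pulls/{pull_number}",
      { owner, repo, pull_number }
    );
    if (pull.mergeable !== null) {
      // Before merging, `merge_commit_sha` points at the test merge commit.
      return { mergeable: pull.mergeable, mergeCommitSha: pull.merge_commit_sha };
    }
    await new Promise((resolve) => setTimeout(resolve, 2000)); // give the job time to finish
  }
  throw new Error("`mergeable` was still null after polling");
}
```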
Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - post: operations["pulls/create-reply-for-review-comment"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/commits": { - /** Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. */ - get: operations["pulls/list-commits"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/files": { - /** **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. */ - get: operations["pulls/list-files"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/merge": { - get: operations["pulls/check-if-merged"]; - /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. */ - put: operations["pulls/merge"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": { - get: operations["pulls/list-requested-reviewers"]; - /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. */ - post: operations["pulls/request-reviewers"]; - delete: operations["pulls/remove-requested-reviewers"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/reviews": { - /** The list of reviews returns in chronological order. */ - get: operations["pulls/list-reviews"]; - /** - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * Pull request reviews created in the `PENDING` state do not include the `submitted_at` property in the response. - * - * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). 
To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. - * - * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. - */ - post: operations["pulls/create-review"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": { - get: operations["pulls/get-review"]; - /** Update the review summary comment with new text. */ - put: operations["pulls/update-review"]; - delete: operations["pulls/delete-pending-review"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": { - /** List comments for a specific pull request review. */ - get: operations["pulls/list-comments-for-review"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": { - /** **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. */ - put: operations["pulls/dismiss-review"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": { - post: operations["pulls/submit-review"]; - }; - "/repos/{owner}/{repo}/pulls/{pull_number}/update-branch": { - /** Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. */ - put: operations["pulls/update-branch"]; - }; - "/repos/{owner}/{repo}/readme": { - /** - * Gets the preferred README for a repository. - * - * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. - */ - get: operations["repos/get-readme"]; - }; - "/repos/{owner}/{repo}/readme/{dir}": { - /** - * Gets the README from a repository directory. - * - * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. - */ - get: operations["repos/get-readme-in-directory"]; - }; - "/repos/{owner}/{repo}/releases": { - /** - * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). - * - * Information about published releases is available to everyone. Only users with push access will receive listings for draft releases. - */ - get: operations["repos/list-releases"]; - /** - * Users with push access to the repository can create a release. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details.
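To make the `line`/`side` and diff media-type workflow described above concrete, here is a hedged TypeScript sketch assuming an `@octokit/core` client; the file path, line number, and comment body are placeholders, not part of the patch.

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function commentOnPullDiff(owner: string, repo: string, pull_number: number) {
  // Fetch the pull request once as JSON (for the head commit SHA)...
  const { data: pull } = await octokit.request(
    "GET /repos/{owner}/{repo}/pulls/{pull_number}",
    { owner, repo, pull_number }
  );

  // ...and once as a raw diff by switching the Accept header.
  const diffResponse = await octokit.request(
    "GET /repos/{owner}/{repo}/pulls/{pull_number}",
    { owner, repo, pull_number, mediaType: { format: "diff" } }
  );
  const rawDiff = diffResponse.data as unknown as string; // unified diff text

  // Comment on a single line of the diff using `line` and `side`
  // (placeholder path and line; pick them from `rawDiff`).
  await octokit.request("POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {
    owner,
    repo,
    pull_number,
    commit_id: pull.head.sha,
    path: "README.md",
    line: 1,
    side: "RIGHT",
    body: "Example review comment",
  });
  return rawDiff;
}
```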
- */ - post: operations["repos/create-release"]; - }; - "/repos/{owner}/{repo}/releases/assets/{asset_id}": { - /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle either a `200` or a `302` response. */ - get: operations["repos/get-release-asset"]; - delete: operations["repos/delete-release-asset"]; - /** Users with push access to the repository can edit a release asset. */ - patch: operations["repos/update-release-asset"]; - }; - "/repos/{owner}/{repo}/releases/latest": { - /** - * View the latest published full release for the repository. - * - * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. - */ - get: operations["repos/get-latest-release"]; - }; - "/repos/{owner}/{repo}/releases/tags/{tag}": { - /** Get a published release with the specified tag. */ - get: operations["repos/get-release-by-tag"]; - }; - "/repos/{owner}/{repo}/releases/{release_id}": { - /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ - get: operations["repos/get-release"]; - /** Users with push access to the repository can delete a release. */ - delete: operations["repos/delete-release"]; - /** Users with push access to the repository can edit a release. */ - patch: operations["repos/update-release"]; - }; - "/repos/{owner}/{repo}/releases/{release_id}/assets": { - get: operations["repos/list-release-assets"]; - /** - * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in - * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. - * - * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. - * - * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: - * - * `application/zip` - * - * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, - * you'll still need to pass your authentication to be able to upload an asset. - * - * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. - * - * **Notes:** - * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods.
The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" - * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact). - * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. - */ - post: operations["repos/upload-release-asset"]; - }; - "/repos/{owner}/{repo}/secret-scanning/alerts": { - /** - * Lists all secret scanning alerts for a private repository, from newest to oldest. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. - */ - get: operations["secret-scanning/list-alerts-for-repo"]; - }; - "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": { - /** - * Gets a single secret scanning alert detected in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. - */ - get: operations["secret-scanning/get-alert"]; - /** - * Updates the status of a secret scanning alert in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. - */ - patch: operations["secret-scanning/update-alert"]; - }; - "/repos/{owner}/{repo}/stargazers": { - /** - * Lists the people that have starred the repository. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - get: operations["activity/list-stargazers-for-repo"]; - }; - "/repos/{owner}/{repo}/stats/code_frequency": { - /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ - get: operations["repos/get-code-frequency-stats"]; - }; - "/repos/{owner}/{repo}/stats/commit_activity": { - /** Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. */ - get: operations["repos/get-commit-activity-stats"]; - }; - "/repos/{owner}/{repo}/stats/contributors": { - /** - * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: - * - * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). - * * `a` - Number of additions - * * `d` - Number of deletions - * * `c` - Number of commits - */ - get: operations["repos/get-contributors-stats"]; - }; - "/repos/{owner}/{repo}/stats/participation": { - /** - * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. - * - * The array order is oldest week (index 0) to most recent week. 
- */ - get: operations["repos/get-participation-stats"]; - }; - "/repos/{owner}/{repo}/stats/punch_card": { - /** - * Each array contains the day number, hour number, and number of commits: - * - * * `0-6`: Sunday - Saturday - * * `0-23`: Hour of day - * * Number of commits - * - * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. - */ - get: operations["repos/get-punch-card-stats"]; - }; - "/repos/{owner}/{repo}/statuses/{sha}": { - /** - * Users with push access in a repository can create commit statuses for a given SHA. - * - * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. - */ - post: operations["repos/create-commit-status"]; - }; - "/repos/{owner}/{repo}/subscribers": { - /** Lists the people watching the specified repository. */ - get: operations["activity/list-watchers-for-repo"]; - }; - "/repos/{owner}/{repo}/subscription": { - get: operations["activity/get-repo-subscription"]; - /** If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. */ - put: operations["activity/set-repo-subscription"]; - /** This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). */ - delete: operations["activity/delete-repo-subscription"]; - }; - "/repos/{owner}/{repo}/tags": { - get: operations["repos/list-tags"]; - }; - "/repos/{owner}/{repo}/tarball/{ref}": { - /** - * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually - * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use - * the `Location` header to make a second `GET` request. - * **Note**: For private repositories, these links are temporary and expire after five minutes. - */ - get: operations["repos/download-tarball-archive"]; - }; - "/repos/{owner}/{repo}/teams": { - get: operations["repos/list-teams"]; - }; - "/repos/{owner}/{repo}/topics": { - get: operations["repos/get-all-topics"]; - put: operations["repos/replace-all-topics"]; - }; - "/repos/{owner}/{repo}/traffic/clones": { - /** Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ - get: operations["repos/get-clones"]; - }; - "/repos/{owner}/{repo}/traffic/popular/paths": { - /** Get the top 10 popular contents over the last 14 days. */ - get: operations["repos/get-top-paths"]; - }; - "/repos/{owner}/{repo}/traffic/popular/referrers": { - /** Get the top 10 referrers over the last 14 days. */ - get: operations["repos/get-top-referrers"]; - }; - "/repos/{owner}/{repo}/traffic/views": { - /** Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. 
*/ - get: operations["repos/get-views"]; - }; - "/repos/{owner}/{repo}/transfer": { - /** A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://help.github.com/articles/about-repository-transfers/). */ - post: operations["repos/transfer"]; - }; - "/repos/{owner}/{repo}/vulnerability-alerts": { - /** Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ - get: operations["repos/check-vulnerability-alerts"]; - /** Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ - put: operations["repos/enable-vulnerability-alerts"]; - /** Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ - delete: operations["repos/disable-vulnerability-alerts"]; - }; - "/repos/{owner}/{repo}/zipball/{ref}": { - /** - * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually - * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use - * the `Location` header to make a second `GET` request. - * **Note**: For private repositories, these links are temporary and expire after five minutes. - */ - get: operations["repos/download-zipball-archive"]; - }; - "/repos/{template_owner}/{template_repo}/generate": { - /** - * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. The authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository - */ - post: operations["repos/create-using-template"]; - }; - "/repositories": { - /** - * Lists all public repositories in the order that they were created. - * - * Notes: - * - For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. - * - Pagination is powered exclusively by the `since` parameter. 
Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. - */ - get: operations["repos/list-public"]; - }; - "/repositories/{repository_id}/environments/{environment_name}/secrets": { - /** Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - get: operations["actions/list-environment-secrets"]; - }; - "/repositories/{repository_id}/environments/{environment_name}/secrets/public-key": { - /** Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - get: operations["actions/get-environment-public-key"]; - }; - "/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": { - /** Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - get: operations["actions/get-environment-secret"]; - /** - * Creates or updates an environment secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use - * this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. - * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. - * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
- * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - put: operations["actions/create-or-update-environment-secret"]; - /** Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - delete: operations["actions/delete-environment-secret"]; - }; - "/scim/v2/enterprises/{enterprise}/Groups": { - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - get: operations["enterprise-admin/list-provisioned-groups-enterprise"]; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Provision an enterprise group, and invite users to the group. This sends invitation emails to the email address of the invited users to join the GitHub organization that the SCIM group corresponds to. - */ - post: operations["enterprise-admin/provision-and-invite-enterprise-group"]; - }; - "/scim/v2/enterprises/{enterprise}/Groups/{scim_group_id}": { - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - get: operations["enterprise-admin/get-provisioning-information-for-enterprise-group"]; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Replaces an existing provisioned group’s information. You must provide all the information required for the group as if you were provisioning it for the first time. Any existing group information that you don't provide will be removed, including group membership. If you want to only update a specific attribute, use the [Update an attribute for a SCIM enterprise group](#update-an-attribute-for-a-scim-enterprise-group) endpoint instead. - */ - put: operations["enterprise-admin/set-information-for-provisioned-enterprise-group"]; - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - delete: operations["enterprise-admin/delete-scim-group-from-enterprise"]; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Allows you to change a provisioned group’s individual attributes. To change a group’s values, you must provide a specific Operations JSON format that contains at least one of the add, remove, or replace operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). 
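Tying the environment-secret endpoints above together, a sketch that mirrors the Node.js encryption example: fetch the environment public key, seal the value with tweetsodium, then PUT the result. Assumes an `@octokit/core` client with the `repo` scope; identifiers are placeholders.

```
import { Octokit } from "@octokit/core";
import * as sodium from "tweetsodium";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function putEnvironmentSecret(
  repository_id: number,
  environment_name: string,
  secret_name: string,
  value: string
) {
  // 1. Get the public key used to seal secrets for this environment.
  const { data: key } = await octokit.request(
    "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key",
    { repository_id, environment_name }
  );

  // 2. Seal the plain-text value with LibSodium, as in the Node.js example above.
  const messageBytes = Buffer.from(value);
  const keyBytes = Buffer.from(key.key, "base64");
  const encrypted_value = Buffer.from(sodium.seal(messageBytes, keyBytes)).toString("base64");

  // 3. Store the encrypted value, referencing the key it was sealed against.
  await octokit.request(
    "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
    { repository_id, environment_name, secret_name, encrypted_value, key_id: key.key_id }
  );
}
```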
- */ - patch: operations["enterprise-admin/update-attribute-for-enterprise-group"]; - }; - "/scim/v2/enterprises/{enterprise}/Users": { - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Retrieves a paginated list of all provisioned enterprise members, including pending invitations. - * - * When a user with a SAML-provisioned external identity leaves (or is removed from) an enterprise, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member: - * - When a user with a SCIM-provisioned external identity is removed from an enterprise, the account's metadata is preserved to allow the user to re-join the organization in the future. - * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). - * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. - * - * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: - * - * 1. The user is granted access by the IdP and is not a member of the GitHub enterprise. - * - * 1. The user attempts to access the GitHub enterprise and initiates the SAML SSO process, and is not currently signed in to their GitHub account. - * - * 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: - * - If the user signs in, their GitHub account is linked to this entry. - * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub enterprise, and the external identity `null` entry remains in place. - */ - get: operations["enterprise-admin/list-provisioned-identities-enterprise"]; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Provision enterprise membership for a user, and send organization invitation emails to the email address. - * - * You can optionally include the groups a user will be invited to join. If you do not provide a list of `groups`, the user is provisioned for the enterprise, but no organization invitation emails will be sent. - */ - post: operations["enterprise-admin/provision-and-invite-enterprise-user"]; - }; - "/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}": { - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - get: operations["enterprise-admin/get-provisioning-information-for-enterprise-user"]; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. 
Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](#update-an-attribute-for-an-enterprise-scim-user) endpoint instead. - * - * You must at least provide the required values for the user: `userName`, `name`, and `emails`. - * - * **Warning:** Setting `active: false` removes the user from the enterprise, deletes the external identity, and deletes the associated `{scim_user_id}`. - */ - put: operations["enterprise-admin/set-information-for-provisioned-enterprise-user"]; - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - delete: operations["enterprise-admin/delete-user-from-enterprise"]; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). - * - * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. - * - * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the enterprise, deletes the external identity, and deletes the associated `:scim_user_id`. - * - * ``` - * { - * "Operations":[{ - * "op":"replace", - * "value":{ - * "active":false - * } - * }] - * } - * ``` - */ - patch: operations["enterprise-admin/update-attribute-for-enterprise-user"]; - }; - "/scim/v2/organizations/{org}/Users": { - /** - * Retrieves a paginated list of all provisioned organization members, including pending invitations. If you provide the `filter` parameter, the resources for all matching provisioned members are returned. - * - * When a user with a SAML-provisioned external identity leaves (or is removed from) an organization, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member: - * - When a user with a SCIM-provisioned external identity is removed from an organization, the account's metadata is preserved to allow the user to re-join the organization in the future. - * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). - * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. - * - * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: - * - * 1.
The user is granted access by the IdP and is not a member of the GitHub organization. - * - * 1. The user attempts to access the GitHub organization and initiates the SAML SSO process, and is not currently signed in to their GitHub account. - * - * 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: - * - If the user signs in, their GitHub account is linked to this entry. - * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub organization, and the external identity `null` entry remains in place. - */ - get: operations["scim/list-provisioned-identities"]; - /** Provision organization membership for a user, and send an activation email to the email address. */ - post: operations["scim/provision-and-invite-user"]; - }; - "/scim/v2/organizations/{org}/Users/{scim_user_id}": { - get: operations["scim/get-provisioning-information-for-user"]; - /** - * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](https://docs.github.com/rest/reference/scim#update-an-attribute-for-a-scim-user) endpoint instead. - * - * You must at least provide the required values for the user: `userName`, `name`, and `emails`. - * - * **Warning:** Setting `active: false` removes the user from the organization, deletes the external identity, and deletes the associated `{scim_user_id}`. - */ - put: operations["scim/set-information-for-provisioned-user"]; - delete: operations["scim/delete-user-from-org"]; - /** - * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). - * - * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. - * - * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the organization, deletes the external identity, and deletes the associated `:scim_user_id`. - * - * ``` - * { - * "Operations":[{ - * "op":"replace", - * "value":{ - * "active":false - * } - * }] - * } - * ``` - */ - patch: operations["scim/update-attribute-for-user"]; - }; - "/search/code": { - /** - * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). 
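The `Operations` payload shown above can be sent like this; a sketch assuming an `@octokit/core` client authenticated as an organization owner. As documented, `active: false` removes the user from the organization, so treat this as illustrative only.

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Soft-deactivate a provisioned user with the `replace` operation shown above.
async function deactivateScimUser(org: string, scim_user_id: string) {
  await octokit.request("PATCH /scim/v2/organizations/{org}/Users/{scim_user_id}", {
    org,
    scim_user_id,
    Operations: [
      {
        op: "replace",
        value: { active: false },
      },
    ],
  });
}
```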
- * - * For example, if you want to find the definition of the `addClass` function inside the [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: - * - * `q=addClass+in:file+language:js+repo:jquery/jquery` - * - * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. - * - * #### Considerations for code search - * - * Due to the complexity of searching code, there are a few restrictions on how searches are performed: - * - * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. - * * Only files smaller than 384 KB are searchable. - * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing - * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. - */ - get: operations["search/code"]; - }; - "/search/commits": { - /** - * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match - * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository, your query would look something like this: - * - * `q=repo:octocat/Spoon-Knife+css` - */ - get: operations["search/commits"]; - }; - "/search/issues": { - /** - * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted - * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find the oldest unresolved Python bugs on Windows, your query might look something like this: - * - * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` - * - * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. - * - * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests.
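Running the issue-search example above through the API might look like this; a sketch assuming an `@octokit/core` client (the `per_page` value is an arbitrary placeholder).

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function oldestOpenPythonBugsOnWindows() {
  // Same query as above: a keyword plus qualifiers, sorted so the oldest issues come first.
  const { data: results } = await octokit.request("GET /search/issues", {
    q: "windows label:bug language:python state:open",
    sort: "created",
    order: "asc",
    per_page: 20,
  });
  for (const issue of results.items) {
    console.log(issue.created_at, issue.title);
  }
}
```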
For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." - */ - get: operations["search/issues-and-pull-requests"]; - }; - "/search/labels": { - /** - * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`, your query might look like this: - * - * `q=bug+defect+enhancement&repository_id=64778136` - * - * The labels that best match the query appear first in the search results. - */ - get: operations["search/labels"]; - }; - "/search/repositories": { - /** - * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: - * - * `q=tetris+language:assembly&sort=stars&order=desc` - * - * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. - * - * When you include the `mercy` preview header, you can also search for multiple topics by adding more `topic:` instances. For example, your query might look like this: - * - * `q=topic:ruby+topic:rails` - */ - get: operations["search/repos"]; - }; - "/search/topics": { - /** - * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://help.github.com/articles/searching-topics/)" for a detailed list of qualifiers. - * - * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics, your query might look like this: - * - * `q=ruby+is:featured` - * - * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured.
The topics that are the best match for the query appear first in the search results. - */ - get: operations["search/topics"]; - }; - "/search/users": { - /** - * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for users, you can get text match metadata for the user **login**, **email**, and **name** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you're looking for a list of popular users, you might try this query: - * - * `q=tom+repos:%3E42+followers:%3E1000` - * - * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. - */ - get: operations["search/users"]; - }; - "/teams/{team_id}": { - /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint. */ - get: operations["teams/get-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint. - * - * To delete a team, the authenticated user must be an organization owner or team maintainer. - * - * If you are an organization owner, deleting a parent team will delete all of its child teams as well. - */ - delete: operations["teams/delete-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint. - * - * To edit a team, the authenticated user must either be an organization owner or a team maintainer. - * - * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. - */ - patch: operations["teams/update-legacy"]; - }; - "/teams/{team_id}/discussions": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. - * - * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - get: operations["teams/list-discussions-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. - * - * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/).
- * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - post: operations["teams/create-discussion-legacy"]; - }; - "/teams/{team_id}/discussions/{discussion_number}": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. - * - * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - get: operations["teams/get-discussion-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. - * - * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - delete: operations["teams/delete-discussion-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. - * - * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - patch: operations["teams/update-discussion-legacy"]; - }; - "/teams/{team_id}/discussions/{discussion_number}/comments": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. - * - * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - get: operations["teams/list-discussion-comments-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. - * - * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. 
See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - post: operations["teams/create-discussion-comment-legacy"]; - }; - "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. - * - * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - get: operations["teams/get-discussion-comment-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. - * - * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - delete: operations["teams/delete-discussion-comment-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. - * - * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - patch: operations["teams/update-discussion-comment-legacy"]; - }; - "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. - * - * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - get: operations["reactions/list-for-team-discussion-comment-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. - * - * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment. 
- */ - post: operations["reactions/create-for-team-discussion-comment-legacy"]; - }; - "/teams/{team_id}/discussions/{discussion_number}/reactions": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. - * - * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - get: operations["reactions/list-for-team-discussion-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. - * - * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion. - */ - post: operations["reactions/create-for-team-discussion-legacy"]; - }; - "/teams/{team_id}/invitations": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. - * - * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. - */ - get: operations["teams/list-pending-invitations-legacy"]; - }; - "/teams/{team_id}/members": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. - * - * Team members will include the members of child teams. - */ - get: operations["teams/list-members-legacy"]; - }; - "/teams/{team_id}/members/{username}": { - /** - * The "Get team member" endpoint (described below) is deprecated. - * - * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships. - * - * To list members in a team, the team must be visible to the authenticated user. - */ - get: operations["teams/get-member-legacy"]; - /** - * The "Add team member" endpoint (described below) is deprecated. - * - * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - put: operations["teams/add-member-legacy"]; - /** - * The "Remove team member" endpoint (described below) is deprecated. - * - * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - */ - delete: operations["teams/remove-member-legacy"]; - }; - "/teams/{team_id}/memberships/{username}": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint. - * - * Team members will include the members of child teams. - * - * To get a user's membership with a team, the team must be visible to the authenticated user. - * - * **Note:** - * The response contains the `state` of the membership and the member's `role`. - * - * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). 
- */ - get: operations["teams/get-membership-for-user-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner. - * - * If the user is already a member of the team, this endpoint will update the role of the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. - */ - put: operations["teams/add-or-update-membership-for-user-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. 
For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - */ - delete: operations["teams/remove-membership-for-user-legacy"]; - }; - "/teams/{team_id}/projects": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. - * - * Lists the organization projects for a team. - */ - get: operations["teams/list-projects-legacy"]; - }; - "/teams/{team_id}/projects/{project_id}": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. - * - * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. - */ - get: operations["teams/check-permissions-for-project-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. - * - * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. - */ - put: operations["teams/add-or-update-project-permissions-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. - * - * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it. - */ - delete: operations["teams/remove-project-legacy"]; - }; - "/teams/{team_id}/repos": { - /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. */ - get: operations["teams/list-repos-legacy"]; - }; - "/teams/{team_id}/repos/{owner}/{repo}": { - /** - * **Note**: Repositories inherited through a parent team will also be checked. - * - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. 
- * - * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - get: operations["teams/check-permissions-for-repo-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. - * - * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - put: operations["teams/add-or-update-repo-permissions-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. - * - * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team. - */ - delete: operations["teams/remove-repo-legacy"]; - }; - "/teams/{team_id}/team-sync/group-mappings": { - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List IdP groups for a team`](https://docs.github.com/rest/reference/teams#list-idp-groups-for-a-team) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * List IdP groups connected to a team on GitHub. - */ - get: operations["teams/list-idp-groups-for-legacy"]; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create or update IdP group connections`](https://docs.github.com/rest/reference/teams#create-or-update-idp-group-connections) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. 
Specifying an empty `groups` array will remove all connections for a team. - */ - patch: operations["teams/create-or-update-idp-group-connections-legacy"]; - }; - "/teams/{team_id}/teams": { - /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. */ - get: operations["teams/list-child-legacy"]; - }; - "/user": { - /** - * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. - * - * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. - */ - get: operations["users/get-authenticated"]; - /** **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. */ - patch: operations["users/update-authenticated"]; - }; - "/user/blocks": { - /** List the users you've blocked on your personal account. */ - get: operations["users/list-blocked-by-authenticated"]; - }; - "/user/blocks/{username}": { - get: operations["users/check-blocked"]; - put: operations["users/block"]; - delete: operations["users/unblock"]; - }; - "/user/email/visibility": { - /** Sets the visibility for your primary email addresses. */ - patch: operations["users/set-primary-email-visibility-for-authenticated"]; - }; - "/user/emails": { - /** Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. */ - get: operations["users/list-emails-for-authenticated"]; - /** This endpoint is accessible with the `user` scope. */ - post: operations["users/add-email-for-authenticated"]; - /** This endpoint is accessible with the `user` scope. */ - delete: operations["users/delete-email-for-authenticated"]; - }; - "/user/followers": { - /** Lists the people following the authenticated user. */ - get: operations["users/list-followers-for-authenticated-user"]; - }; - "/user/following": { - /** Lists the people who the authenticated user follows. */ - get: operations["users/list-followed-by-authenticated"]; - }; - "/user/following/{username}": { - get: operations["users/check-person-is-followed-by-authenticated"]; - /** - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. - */ - put: operations["users/follow"]; - /** Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. */ - delete: operations["users/unfollow"]; - }; - "/user/gpg_keys": { - /** Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - get: operations["users/list-gpg-keys-for-authenticated"]; - /** Adds a GPG key to the authenticated user's GitHub account. 
Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - post: operations["users/create-gpg-key-for-authenticated"]; - }; - "/user/gpg_keys/{gpg_key_id}": { - /** View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - get: operations["users/get-gpg-key-for-authenticated"]; - /** Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - delete: operations["users/delete-gpg-key-for-authenticated"]; - }; - "/user/installations": { - /** - * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. - * - * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - * - * You can find the permissions for the installation under the `permissions` key. - */ - get: operations["apps/list-installations-for-authenticated-user"]; - }; - "/user/installations/{installation_id}/repositories": { - /** - * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - * - * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. - * - * The access the user has to each repository is included in the hash under the `permissions` key. - */ - get: operations["apps/list-installation-repos-for-authenticated-user"]; - }; - "/user/installations/{installation_id}/repositories/{repository_id}": { - /** - * Add a single repository to an installation. The authenticated user must have admin access to the repository. - * - * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. - */ - put: operations["apps/add-repo-to-installation"]; - /** - * Remove a single repository from an installation. The authenticated user must have admin access to the repository. 
- * - * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. - */ - delete: operations["apps/remove-repo-from-installation"]; - }; - "/user/interaction-limits": { - /** Shows which type of GitHub user can interact with your public repositories and when the restriction expires. */ - get: operations["interactions/get-restrictions-for-authenticated-user"]; - /** Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. */ - put: operations["interactions/set-restrictions-for-authenticated-user"]; - /** Removes any interaction restrictions from your public repositories. */ - delete: operations["interactions/remove-restrictions-for-authenticated-user"]; - }; - "/user/issues": { - /** - * List issues across owned and member repositories assigned to the authenticated user. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - get: operations["issues/list-for-authenticated-user"]; - }; - "/user/keys": { - /** Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - get: operations["users/list-public-ssh-keys-for-authenticated"]; - /** Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - post: operations["users/create-public-ssh-key-for-authenticated"]; - }; - "/user/keys/{key_id}": { - /** View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - get: operations["users/get-public-ssh-key-for-authenticated"]; - /** Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - delete: operations["users/delete-public-ssh-key-for-authenticated"]; - }; - "/user/marketplace_purchases": { - /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. 
OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ - get: operations["apps/list-subscriptions-for-authenticated-user"]; - }; - "/user/marketplace_purchases/stubbed": { - /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ - get: operations["apps/list-subscriptions-for-authenticated-user-stubbed"]; - }; - "/user/memberships/orgs": { - get: operations["orgs/list-memberships-for-authenticated-user"]; - }; - "/user/memberships/orgs/{org}": { - get: operations["orgs/get-membership-for-authenticated-user"]; - patch: operations["orgs/update-membership-for-authenticated-user"]; - }; - "/user/migrations": { - /** Lists all migrations a user has started. */ - get: operations["migrations/list-for-authenticated-user"]; - /** Initiates the generation of a user migration archive. */ - post: operations["migrations/start-for-authenticated-user"]; - }; - "/user/migrations/{migration_id}": { - /** - * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: - * - * * `pending` - the migration hasn't started yet. - * * `exporting` - the migration is in progress. - * * `exported` - the migration finished successfully. - * * `failed` - the migration failed. - * - * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). - */ - get: operations["migrations/get-status-for-authenticated-user"]; - }; - "/user/migrations/{migration_id}/archive": { - /** - * Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: - * - * * attachments - * * bases - * * commit\_comments - * * issue\_comments - * * issue\_events - * * issues - * * milestones - * * organizations - * * projects - * * protected\_branches - * * pull\_request\_reviews - * * pull\_requests - * * releases - * * repositories - * * review\_comments - * * schema - * * users - * - * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. - */ - get: operations["migrations/get-archive-for-authenticated-user"]; - /** Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. */ - delete: operations["migrations/delete-archive-for-authenticated-user"]; - }; - "/user/migrations/{migration_id}/repos/{repo_name}/lock": { - /** Unlocks a repository. 
You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. */ - delete: operations["migrations/unlock-repo-for-authenticated-user"]; - }; - "/user/migrations/{migration_id}/repositories": { - /** Lists all the repositories for this user migration. */ - get: operations["migrations/list-repos-for-user"]; - }; - "/user/orgs": { - /** - * List organizations for the authenticated user. - * - * **OAuth scope requirements** - * - * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. - */ - get: operations["orgs/list-for-authenticated-user"]; - }; - "/user/packages/{package_type}/{package_name}": { - /** - * Gets a specific package for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-package-for-authenticated-user"]; - /** - * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - delete: operations["packages/delete-package-for-authenticated-user"]; - }; - "/user/packages/{package_type}/{package_name}/restore": { - /** - * Restores a package owned by the authenticated user. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scope. If `package_type` is not `container`, your token must also include the `repo` scope. - */ - post: operations["packages/restore-package-for-authenticated-user"]; - }; - "/user/packages/{package_type}/{package_name}/versions": { - /** - * Returns all package versions for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. 
- */ - get: operations["packages/get-all-package-versions-for-package-owned-by-authenticated-user"]; - }; - "/user/packages/{package_type}/{package_name}/versions/{package_version_id}": { - /** - * Gets a specific package version for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-package-version-for-authenticated-user"]; - /** - * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - delete: operations["packages/delete-package-version-for-authenticated-user"]; - }; - "/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { - /** - * Restores a package version owned by the authenticated user. - * - * You can restore a deleted package version under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scope. If `package_type` is not `container`, your token must also include the `repo` scope. - */ - post: operations["packages/restore-package-version-for-authenticated-user"]; - }; - "/user/projects": { - post: operations["projects/create-for-authenticated-user"]; - }; - "/user/public_emails": { - /** Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. */ - get: operations["users/list-public-emails-for-authenticated"]; - }; - "/user/repos": { - /** - * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - */ - get: operations["repos/list-for-authenticated-user"]; - /** - * Creates a new repository for the authenticated user. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository. 
- */ - post: operations["repos/create-for-authenticated-user"]; - }; - "/user/repository_invitations": { - /** When authenticating as a user, this endpoint will list all currently open repository invitations for that user. */ - get: operations["repos/list-invitations-for-authenticated-user"]; - }; - "/user/repository_invitations/{invitation_id}": { - delete: operations["repos/decline-invitation"]; - patch: operations["repos/accept-invitation"]; - }; - "/user/starred": { - /** - * Lists repositories the authenticated user has starred. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - get: operations["activity/list-repos-starred-by-authenticated-user"]; - }; - "/user/starred/{owner}/{repo}": { - get: operations["activity/check-repo-is-starred-by-authenticated-user"]; - /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ - put: operations["activity/star-repo-for-authenticated-user"]; - delete: operations["activity/unstar-repo-for-authenticated-user"]; - }; - "/user/subscriptions": { - /** Lists repositories the authenticated user is watching. */ - get: operations["activity/list-watched-repos-for-authenticated-user"]; - }; - "/user/teams": { - /** List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). */ - get: operations["teams/list-for-authenticated-user"]; - }; - "/users": { - /** - * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. - * - * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. - */ - get: operations["users/list"]; - }; - "/users/{username}": { - /** - * Provides publicly available information about someone with a GitHub account. - * - * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below" - * - * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). - * - * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. 
For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". - */ - get: operations["users/get-by-username"]; - }; - "/users/{username}/events": { - /** If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. */ - get: operations["activity/list-events-for-authenticated-user"]; - }; - "/users/{username}/events/orgs/{org}": { - /** This is the user's organization dashboard. You must be authenticated as the user to view this. */ - get: operations["activity/list-org-events-for-authenticated-user"]; - }; - "/users/{username}/events/public": { - get: operations["activity/list-public-events-for-user"]; - }; - "/users/{username}/followers": { - /** Lists the people following the specified user. */ - get: operations["users/list-followers-for-user"]; - }; - "/users/{username}/following": { - /** Lists the people who the specified user follows. */ - get: operations["users/list-following-for-user"]; - }; - "/users/{username}/following/{target_user}": { - get: operations["users/check-following-for-user"]; - }; - "/users/{username}/gists": { - /** Lists public gists for the specified user: */ - get: operations["gists/list-for-user"]; - }; - "/users/{username}/gpg_keys": { - /** Lists the GPG keys for a user. This information is accessible by anyone. */ - get: operations["users/list-gpg-keys-for-user"]; - }; - "/users/{username}/hovercard": { - /** - * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. - * - * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: - * - * ```shell - * curl -u username:token - * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 - * ``` - */ - get: operations["users/get-context-for-user"]; - }; - "/users/{username}/installation": { - /** - * Enables an authenticated GitHub App to find the user’s installation information. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - get: operations["apps/get-user-installation"]; - }; - "/users/{username}/keys": { - /** Lists the _verified_ public SSH keys for a user. This is accessible by anyone. */ - get: operations["users/list-public-keys-for-user"]; - }; - "/users/{username}/orgs": { - /** - * List [public organization memberships](https://help.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. - * - * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. - */ - get: operations["orgs/list-for-user"]; - }; - "/users/{username}/packages/{package_type}/{package_name}": { - /** - * Gets a specific package metadata for a public package owned by a user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. 
- * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-package-for-user"]; - }; - "/users/{username}/packages/{package_type}/{package_name}/versions": { - /** - * Returns all package versions for a public package owned by a specified user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-all-package-versions-for-package-owned-by-user"]; - }; - "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": { - /** - * Gets a specific package version for a public package owned by a specified user. - * - * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - get: operations["packages/get-package-version-for-user"]; - }; - "/users/{username}/projects": { - get: operations["projects/list-for-user"]; - }; - "/users/{username}/received_events": { - /** These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. */ - get: operations["activity/list-received-events-for-user"]; - }; - "/users/{username}/received_events/public": { - get: operations["activity/list-received-public-events-for-user"]; - }; - "/users/{username}/repos": { - /** Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. */ - get: operations["repos/list-for-user"]; - }; - "/users/{username}/settings/billing/actions": { - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Access tokens must have the `user` scope. - */ - get: operations["billing/get-github-actions-billing-user"]; - }; - "/users/{username}/settings/billing/packages": { - /** - * Gets the free and paid storage used for GitHub Packages in gigabytes. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `user` scope. - */ - get: operations["billing/get-github-packages-billing-user"]; - }; - "/users/{username}/settings/billing/shared-storage": { - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and Github Packages. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." 
- * - * Access tokens must have the `user` scope. - */ - get: operations["billing/get-shared-storage-billing-user"]; - }; - "/users/{username}/starred": { - /** - * Lists repositories a user has starred. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - get: operations["activity/list-repos-starred-by-user"]; - }; - "/users/{username}/subscriptions": { - /** Lists repositories a user is watching. */ - get: operations["activity/list-repos-watched-by-user"]; - }; - "/zen": { - /** Get a random sentence from the Zen of GitHub */ - get: operations["meta/get-zen"]; - }; - "/repos/{owner}/{repo}/compare/{base}...{head}": { - /** - * Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. - * - * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. - * - * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. - * - * **Working with large comparisons** - * - * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." - * - * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. 
| - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - get: operations["repos/compare-commits"]; - }; -} -export interface components { - schemas: { - /** Simple User */ - "simple-user": { - login: string; - id: number; - node_id: string; - avatar_url: string; - gravatar_id: string | null; - url: string; - html_url: string; - followers_url: string; - following_url: string; - gists_url: string; - starred_url: string; - subscriptions_url: string; - organizations_url: string; - repos_url: string; - events_url: string; - received_events_url: string; - type: string; - site_admin: boolean; - starred_at?: string; - } | null; - /** GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. */ - integration: { - /** Unique identifier of the GitHub app */ - id: number; - /** The slug name of the GitHub app */ - slug?: string; - node_id: string; - owner: components["schemas"]["simple-user"] | null; - /** The name of the GitHub app */ - name: string; - description: string | null; - external_url: string; - html_url: string; - created_at: string; - updated_at: string; - /** The set of permissions for the GitHub app */ - permissions: { - issues?: string; - checks?: string; - metadata?: string; - contents?: string; - deployments?: string; - } & { - [key: string]: string; - }; - /** The list of events for the GitHub app */ - events: string[]; - /** The number of installations associated with the GitHub app */ - installations_count?: number; - client_id?: string; - client_secret?: string; - webhook_secret?: string | null; - pem?: string; - } & { - [key: string]: any; - }; - /** Basic Error */ - "basic-error": { - message?: string; - documentation_url?: string; - url?: string; - status?: string; - }; - /** Validation Error Simple */ - "validation-error-simple": { - message: string; - documentation_url: string; - errors?: string[]; - }; - /** The URL to which the payloads will be delivered. */ - "webhook-config-url": string; - /** The media type used to serialize the payloads. Supported values include `json` and `form`. The default is `form`. */ - "webhook-config-content-type": string; - /** If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). 
*/ - "webhook-config-secret": string; - "webhook-config-insecure-ssl": string | number; - /** Configuration object of the webhook */ - "webhook-config": { - url?: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - }; - /** An enterprise account */ - enterprise: { - /** A short description of the enterprise. */ - description?: string | null; - html_url: string; - /** The enterprise's website URL. */ - website_url?: string | null; - /** Unique identifier of the enterprise */ - id: number; - node_id: string; - /** The name of the enterprise. */ - name: string; - /** The slug url identifier for the enterprise. */ - slug: string; - created_at: string | null; - updated_at: string | null; - avatar_url: string; - }; - /** Installation */ - installation: { - /** The ID of the installation. */ - id: number; - account: (Partial & Partial) | null; - /** Describe whether all repositories have been selected or there's a selection involved */ - repository_selection: "all" | "selected"; - access_tokens_url: string; - repositories_url: string; - html_url: string; - app_id: number; - /** The ID of the user or organization this token is being scoped to. */ - target_id: number; - target_type: string; - permissions: { - deployments?: string; - checks?: string; - metadata?: string; - contents?: string; - pull_requests?: string; - statuses?: string; - issues?: string; - organization_administration?: string; - }; - events: string[]; - created_at: string; - updated_at: string; - single_file_name: string | null; - has_multiple_single_files?: boolean; - single_file_paths?: string[]; - app_slug: string; - suspended_by: components["schemas"]["simple-user"] | null; - suspended_at: string | null; - contact_email?: string | null; - }; - /** The permissions granted to the user-to-server access token. */ - "app-permissions": { - /** The level of permission to grant the access token for GitHub Actions workflows, workflow runs, and artifacts. Can be one of: `read` or `write`. */ - actions?: "read" | "write"; - /** The level of permission to grant the access token for repository creation, deletion, settings, teams, and collaborators creation. Can be one of: `read` or `write`. */ - administration?: "read" | "write"; - /** The level of permission to grant the access token for checks on code. Can be one of: `read` or `write`. */ - checks?: "read" | "write"; - /** The level of permission to grant the access token for notification of content references and creation content attachments. Can be one of: `read` or `write`. */ - content_references?: "read" | "write"; - /** The level of permission to grant the access token for repository contents, commits, branches, downloads, releases, and merges. Can be one of: `read` or `write`. */ - contents?: "read" | "write"; - /** The level of permission to grant the access token for deployments and deployment statuses. Can be one of: `read` or `write`. */ - deployments?: "read" | "write"; - /** The level of permission to grant the access token for managing repository environments. Can be one of: `read` or `write`. */ - environments?: "read" | "write"; - /** The level of permission to grant the access token for issues and related comments, assignees, labels, and milestones. Can be one of: `read` or `write`. 
*/ - issues?: "read" | "write"; - /** The level of permission to grant the access token to search repositories, list collaborators, and access repository metadata. Can be one of: `read` or `write`. */ - metadata?: "read" | "write"; - /** The level of permission to grant the access token for packages published to GitHub Packages. Can be one of: `read` or `write`. */ - packages?: "read" | "write"; - /** The level of permission to grant the access token to retrieve Pages statuses, configuration, and builds, as well as create new builds. Can be one of: `read` or `write`. */ - pages?: "read" | "write"; - /** The level of permission to grant the access token for pull requests and related comments, assignees, labels, milestones, and merges. Can be one of: `read` or `write`. */ - pull_requests?: "read" | "write"; - /** The level of permission to grant the access token to manage the post-receive hooks for a repository. Can be one of: `read` or `write`. */ - repository_hooks?: "read" | "write"; - /** The level of permission to grant the access token to manage repository projects, columns, and cards. Can be one of: `read`, `write`, or `admin`. */ - repository_projects?: "read" | "write" | "admin"; - /** The level of permission to grant the access token to view and manage secret scanning alerts. Can be one of: `read` or `write`. */ - secret_scanning_alerts?: "read" | "write"; - /** The level of permission to grant the access token to manage repository secrets. Can be one of: `read` or `write`. */ - secrets?: "read" | "write"; - /** The level of permission to grant the access token to view and manage security events like code scanning alerts. Can be one of: `read` or `write`. */ - security_events?: "read" | "write"; - /** The level of permission to grant the access token to manage just a single file. Can be one of: `read` or `write`. */ - single_file?: "read" | "write"; - /** The level of permission to grant the access token for commit statuses. Can be one of: `read` or `write`. */ - statuses?: "read" | "write"; - /** The level of permission to grant the access token to retrieve Dependabot alerts. Can be one of: `read`. */ - vulnerability_alerts?: "read"; - /** The level of permission to grant the access token to update GitHub Actions workflow files. Can be one of: `write`. */ - workflows?: "write"; - /** The level of permission to grant the access token for organization teams and members. Can be one of: `read` or `write`. */ - members?: "read" | "write"; - /** The level of permission to grant the access token to manage access to an organization. Can be one of: `read` or `write`. */ - organization_administration?: "read" | "write"; - /** The level of permission to grant the access token to manage the post-receive hooks for an organization. Can be one of: `read` or `write`. */ - organization_hooks?: "read" | "write"; - /** The level of permission to grant the access token for viewing an organization's plan. Can be one of: `read`. */ - organization_plan?: "read"; - /** The level of permission to grant the access token to manage organization projects, columns, and cards. Can be one of: `read`, `write`, or `admin`. */ - organization_projects?: "read" | "write" | "admin"; - /** The level of permission to grant the access token to manage organization secrets. Can be one of: `read` or `write`. */ - organization_secrets?: "read" | "write"; - /** The level of permission to grant the access token to view and manage GitHub Actions self-hosted runners available to an organization. Can be one of: `read` or `write`. 
*/ - organization_self_hosted_runners?: "read" | "write"; - /** The level of permission to grant the access token to view and manage users blocked by the organization. Can be one of: `read` or `write`. */ - organization_user_blocking?: "read" | "write"; - /** The level of permission to grant the access token to manage team discussions and related comments. Can be one of: `read` or `write`. */ - team_discussions?: "read" | "write"; - }; - /** License Simple */ - "license-simple": { - key: string; - name: string; - url: string | null; - spdx_id: string | null; - node_id: string; - html_url?: string; - }; - /** A git repository */ - repository: { - /** Unique identifier of the repository */ - id: number; - node_id: string; - /** The name of the repository. */ - name: string; - full_name: string; - license: components["schemas"]["license-simple"] | null; - organization?: components["schemas"]["simple-user"] | null; - forks: number; - permissions?: { - admin: boolean; - pull: boolean; - triage?: boolean; - push: boolean; - maintain?: boolean; - }; - owner: components["schemas"]["simple-user"] | null; - /** Whether the repository is private or public. */ - private: boolean; - html_url: string; - description: string | null; - fork: boolean; - url: string; - archive_url: string; - assignees_url: string; - blobs_url: string; - branches_url: string; - collaborators_url: string; - comments_url: string; - commits_url: string; - compare_url: string; - contents_url: string; - contributors_url: string; - deployments_url: string; - downloads_url: string; - events_url: string; - forks_url: string; - git_commits_url: string; - git_refs_url: string; - git_tags_url: string; - git_url: string; - issue_comment_url: string; - issue_events_url: string; - issues_url: string; - keys_url: string; - labels_url: string; - languages_url: string; - merges_url: string; - milestones_url: string; - notifications_url: string; - pulls_url: string; - releases_url: string; - ssh_url: string; - stargazers_url: string; - statuses_url: string; - subscribers_url: string; - subscription_url: string; - tags_url: string; - teams_url: string; - trees_url: string; - clone_url: string; - mirror_url: string | null; - hooks_url: string; - svn_url: string; - homepage: string | null; - language: string | null; - forks_count: number; - stargazers_count: number; - watchers_count: number; - size: number; - /** The default branch of the repository. */ - default_branch: string; - open_issues_count: number; - /** Whether this repository acts as a template that can be used to generate new repositories. */ - is_template?: boolean; - topics?: string[]; - /** Whether issues are enabled. */ - has_issues: boolean; - /** Whether projects are enabled. */ - has_projects: boolean; - /** Whether the wiki is enabled. */ - has_wiki: boolean; - has_pages: boolean; - /** Whether downloads are enabled. */ - has_downloads: boolean; - /** Whether the repository is archived. */ - archived: boolean; - /** Returns whether or not this repository disabled. */ - disabled: boolean; - /** The repository visibility: public, private, or internal. */ - visibility?: string; - pushed_at: string | null; - created_at: string | null; - updated_at: string | null; - /** Whether to allow rebase merges for pull requests. 
*/ - allow_rebase_merge?: boolean; - template_repository?: { - id?: number; - node_id?: string; - name?: string; - full_name?: string; - owner?: { - login?: string; - id?: number; - node_id?: string; - avatar_url?: string; - gravatar_id?: string; - url?: string; - html_url?: string; - followers_url?: string; - following_url?: string; - gists_url?: string; - starred_url?: string; - subscriptions_url?: string; - organizations_url?: string; - repos_url?: string; - events_url?: string; - received_events_url?: string; - type?: string; - site_admin?: boolean; - }; - private?: boolean; - html_url?: string; - description?: string; - fork?: boolean; - url?: string; - archive_url?: string; - assignees_url?: string; - blobs_url?: string; - branches_url?: string; - collaborators_url?: string; - comments_url?: string; - commits_url?: string; - compare_url?: string; - contents_url?: string; - contributors_url?: string; - deployments_url?: string; - downloads_url?: string; - events_url?: string; - forks_url?: string; - git_commits_url?: string; - git_refs_url?: string; - git_tags_url?: string; - git_url?: string; - issue_comment_url?: string; - issue_events_url?: string; - issues_url?: string; - keys_url?: string; - labels_url?: string; - languages_url?: string; - merges_url?: string; - milestones_url?: string; - notifications_url?: string; - pulls_url?: string; - releases_url?: string; - ssh_url?: string; - stargazers_url?: string; - statuses_url?: string; - subscribers_url?: string; - subscription_url?: string; - tags_url?: string; - teams_url?: string; - trees_url?: string; - clone_url?: string; - mirror_url?: string; - hooks_url?: string; - svn_url?: string; - homepage?: string; - language?: string; - forks_count?: number; - stargazers_count?: number; - watchers_count?: number; - size?: number; - default_branch?: string; - open_issues_count?: number; - is_template?: boolean; - topics?: string[]; - has_issues?: boolean; - has_projects?: boolean; - has_wiki?: boolean; - has_pages?: boolean; - has_downloads?: boolean; - archived?: boolean; - disabled?: boolean; - visibility?: string; - pushed_at?: string; - created_at?: string; - updated_at?: string; - permissions?: { - admin?: boolean; - push?: boolean; - pull?: boolean; - }; - allow_rebase_merge?: boolean; - temp_clone_token?: string; - allow_squash_merge?: boolean; - delete_branch_on_merge?: boolean; - allow_merge_commit?: boolean; - subscribers_count?: number; - network_count?: number; - } | null; - temp_clone_token?: string; - /** Whether to allow squash merges for pull requests. */ - allow_squash_merge?: boolean; - /** Whether to delete head branches when pull requests are merged */ - delete_branch_on_merge?: boolean; - /** Whether to allow merge commits for pull requests. */ - allow_merge_commit?: boolean; - subscribers_count?: number; - network_count?: number; - open_issues: number; - watchers: number; - master_branch?: string; - starred_at?: string; - }; - /** Authentication token for a GitHub App installed on a user or org. 
*/ - "installation-token": { - token: string; - expires_at: string; - permissions?: { - issues?: string; - contents?: string; - metadata?: string; - single_file?: string; - }; - repository_selection?: "all" | "selected"; - repositories?: components["schemas"]["repository"][]; - single_file?: string; - has_multiple_single_files?: boolean; - single_file_paths?: string[]; - }; - /** Validation Error */ - "validation-error": { - message: string; - documentation_url: string; - errors?: { - resource?: string; - field?: string; - message?: string; - code: string; - index?: number; - value?: (string | null) | (number | null) | (string[] | null); - }[]; - }; - /** The authorization associated with an OAuth Access. */ - "application-grant": { - id: number; - url: string; - app: { - client_id: string; - name: string; - url: string; - }; - created_at: string; - updated_at: string; - scopes: string[]; - user?: components["schemas"]["simple-user"] | null; - }; - "scoped-installation": { - permissions: components["schemas"]["app-permissions"]; - /** Describe whether all repositories have been selected or there's a selection involved */ - repository_selection: "all" | "selected"; - single_file_name: string | null; - has_multiple_single_files?: boolean; - single_file_paths?: string[]; - repositories_url: string; - account: components["schemas"]["simple-user"]; - }; - /** The authorization for an OAuth app, GitHub App, or a Personal Access Token. */ - authorization: { - id: number; - url: string; - /** A list of scopes that this authorization is in. */ - scopes: string[] | null; - token: string; - token_last_eight: string | null; - hashed_token: string | null; - app: { - client_id: string; - name: string; - url: string; - }; - note: string | null; - note_url: string | null; - updated_at: string; - created_at: string; - fingerprint: string | null; - user?: components["schemas"]["simple-user"] | null; - installation?: components["schemas"]["scoped-installation"] | null; - }; - /** Code Of Conduct */ - "code-of-conduct": { - key: string; - name: string; - url: string; - body?: string; - html_url: string | null; - }; - /** Content Reference attachments allow you to provide context around URLs posted in comments */ - "content-reference-attachment": { - /** The ID of the attachment */ - id: number; - /** The title of the attachment */ - title: string; - /** The body of the attachment */ - body: string; - /** The node_id of the content attachment */ - node_id?: string; - }; - /** The policy that controls the organizations in the enterprise that are allowed to run GitHub Actions. Can be one of: `all`, `none`, or `selected`. */ - "enabled-organizations": "all" | "none" | "selected"; - /** The permissions policy that controls the actions that are allowed to run. Can be one of: `all`, `local_only`, or `selected`. */ - "allowed-actions": "all" | "local_only" | "selected"; - /** The API URL to use to get or set the actions that are allowed to run, when `allowed_actions` is set to `selected`. */ - "selected-actions-url": string; - "actions-enterprise-permissions": { - enabled_organizations: components["schemas"]["enabled-organizations"]; - /** The API URL to use to get or set the selected organizations that are allowed to run GitHub Actions, when `enabled_organizations` is set to `selected`. 
*/ - selected_organizations_url?: string; - allowed_actions: components["schemas"]["allowed-actions"]; - selected_actions_url?: components["schemas"]["selected-actions-url"]; - }; - /** Organization Simple */ - "organization-simple": { - login: string; - id: number; - node_id: string; - url: string; - repos_url: string; - events_url: string; - hooks_url: string; - issues_url: string; - members_url: string; - public_members_url: string; - avatar_url: string; - description: string | null; - }; - "selected-actions": { - /** Whether GitHub-owned actions are allowed. For example, this includes the actions in the `actions` organization. */ - github_owned_allowed?: boolean; - /** Whether actions in GitHub Marketplace from verified creators are allowed. Set to `true` to allow all GitHub Marketplace actions by verified creators. */ - verified_allowed?: boolean; - /** Specifies a list of string-matching patterns to allow specific action(s). Wildcards, tags, and SHAs are allowed. For example, `monalisa/octocat@*`, `monalisa/octocat@v2`, `monalisa/*`." */ - patterns_allowed?: string[]; - }; - "runner-groups-enterprise": { - id: number; - name: string; - visibility: string; - default: boolean; - selected_organizations_url?: string; - runners_url: string; - allows_public_repositories: boolean; - }; - /** A self hosted runner */ - runner: { - /** The id of the runner. */ - id: number; - /** The name of the runner. */ - name: string; - /** The Operating System of the runner. */ - os: string; - /** The status of the runner. */ - status: string; - busy: boolean; - labels: { - /** Unique identifier of the label. */ - id?: number; - /** Name of the label. */ - name?: string; - /** The type of label. Read-only labels are applied automatically when the runner is configured. */ - type?: "read-only" | "custom"; - }[]; - }; - /** Runner Application */ - "runner-application": { - os: string; - architecture: string; - download_url: string; - filename: string; - /** A short lived bearer token used to download the runner, if needed. */ - temp_download_token?: string; - sha256_checksum?: string; - }; - /** Authentication Token */ - "authentication-token": { - /** The token used for authentication */ - token: string; - /** The time this token expires */ - expires_at: string; - permissions?: { - [key: string]: any; - }; - /** The repositories this token has access to */ - repositories?: components["schemas"]["repository"][]; - single_file?: string | null; - /** Describe whether all repositories have been selected or there's a selection involved */ - repository_selection?: "all" | "selected"; - }; - "audit-log-event": { - /** The time the audit log event occurred, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). */ - "@timestamp"?: number; - /** The name of the action that was performed, for example `user.login` or `repo.create`. */ - action?: string; - active?: boolean; - active_was?: boolean; - /** The actor who performed the action. */ - actor?: string; - /** The username of the account being blocked. */ - blocked_user?: string; - business?: string; - config?: any[]; - config_was?: any[]; - content_type?: string; - /** The time the audit log event was recorded, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). 
*/ - created_at?: number; - deploy_key_fingerprint?: string; - emoji?: string; - events?: any[]; - events_were?: any[]; - explanation?: string; - fingerprint?: string; - hook_id?: number; - limited_availability?: boolean; - message?: string; - name?: string; - old_user?: string; - openssh_public_key?: string; - org?: string; - previous_visibility?: string; - read_only?: boolean; - /** The name of the repository. */ - repo?: string; - /** The name of the repository. */ - repository?: string; - repository_public?: boolean; - target_login?: string; - team?: string; - /** The type of protocol (for example, HTTP or SSH) used to transfer Git data. */ - transport_protocol?: number; - /** A human readable name for the protocol (for example, HTTP or SSH) used to transfer Git data. */ - transport_protocol_name?: string; - /** The user that was affected by the action performed (if available). */ - user?: string; - /** The repository visibility, for example `public` or `private`. */ - visibility?: string; - }; - "actions-billing-usage": { - /** The sum of the free and paid GitHub Actions minutes used. */ - total_minutes_used: number; - /** The total paid GitHub Actions minutes used. */ - total_paid_minutes_used: number; - /** The amount of free GitHub Actions minutes available. */ - included_minutes: number; - minutes_used_breakdown: { - /** Total minutes used on Ubuntu runner machines. */ - UBUNTU?: number; - /** Total minutes used on macOS runner machines. */ - MACOS?: number; - /** Total minutes used on Windows runner machines. */ - WINDOWS?: number; - }; - }; - "packages-billing-usage": { - /** Sum of the free and paid storage space (GB) for GitHub Packages. */ - total_gigabytes_bandwidth_used: number; - /** Total paid storage space (GB) for GitHub Packages. */ - total_paid_gigabytes_bandwidth_used: number; - /** Free storage space (GB) for GitHub Packages. */ - included_gigabytes_bandwidth: number; - }; - "combined-billing-usage": { - /** Number of days left in billing cycle. */ - days_left_in_billing_cycle: number; - /** Estimated storage space (GB) used in billing cycle. */ - estimated_paid_storage_for_month: number; - /** Estimated sum of free and paid storage space (GB) used in billing cycle. */ - estimated_storage_for_month: number; - }; - /** Actor */ - actor: { - id: number; - login: string; - display_login?: string; - gravatar_id: string | null; - url: string; - avatar_url: string; - }; - /** Color-coded labels help you categorize and filter your issues (just like labels in Gmail). */ - label: { - id: number; - node_id: string; - /** URL for the label */ - url: string; - /** The name of the label. */ - name: string; - description: string | null; - /** 6-character hex code, without the leading #, identifying the color */ - color: string; - default: boolean; - }; - /** A collection of related issues and pull requests. */ - milestone: { - url: string; - html_url: string; - labels_url: string; - id: number; - node_id: string; - /** The number of the milestone. */ - number: number; - /** The state of the milestone. */ - state: "open" | "closed"; - /** The title of the milestone. */ - title: string; - description: string | null; - creator: components["schemas"]["simple-user"] | null; - open_issues: number; - closed_issues: number; - created_at: string; - updated_at: string; - closed_at: string | null; - due_on: string | null; - }; - /** How the author is associated with the repository. 
*/ - author_association: "COLLABORATOR" | "CONTRIBUTOR" | "FIRST_TIMER" | "FIRST_TIME_CONTRIBUTOR" | "MANNEQUIN" | "MEMBER" | "NONE" | "OWNER"; - /** Issue Simple */ - "issue-simple": { - id: number; - node_id: string; - url: string; - repository_url: string; - labels_url: string; - comments_url: string; - events_url: string; - html_url: string; - number: number; - state: string; - title: string; - body?: string; - user: components["schemas"]["simple-user"] | null; - labels: components["schemas"]["label"][]; - assignee: components["schemas"]["simple-user"] | null; - assignees?: components["schemas"]["simple-user"][] | null; - milestone: components["schemas"]["milestone"] | null; - locked: boolean; - active_lock_reason?: string | null; - comments: number; - pull_request?: { - merged_at?: string | null; - diff_url: string | null; - html_url: string | null; - patch_url: string | null; - url: string | null; - }; - closed_at: string | null; - created_at: string; - updated_at: string; - author_association: components["schemas"]["author_association"]; - body_html?: string; - body_text?: string; - timeline_url?: string; - repository?: components["schemas"]["repository"]; - performed_via_github_app?: components["schemas"]["integration"] | null; - }; - "reaction-rollup": { - url: string; - total_count: number; - "+1": number; - "-1": number; - laugh: number; - confused: number; - heart: number; - hooray: number; - eyes: number; - rocket: number; - }; - /** Comments provide a way for people to collaborate on an issue. */ - "issue-comment": { - /** Unique identifier of the issue comment */ - id: number; - node_id: string; - /** URL for the issue comment */ - url: string; - /** Contents of the issue comment */ - body?: string; - body_text?: string; - body_html?: string; - html_url: string; - user: components["schemas"]["simple-user"] | null; - created_at: string; - updated_at: string; - issue_url: string; - author_association: components["schemas"]["author_association"]; - performed_via_github_app?: components["schemas"]["integration"] | null; - reactions?: components["schemas"]["reaction-rollup"]; - }; - /** Event */ - event: { - id: string; - type: string | null; - actor: components["schemas"]["actor"]; - repo: { - id: number; - name: string; - url: string; - }; - org?: components["schemas"]["actor"]; - payload: { - action?: string; - issue?: components["schemas"]["issue-simple"]; - comment?: components["schemas"]["issue-comment"]; - pages?: { - page_name?: string; - title?: string; - summary?: string | null; - action?: string; - sha?: string; - html_url?: string; - }[]; - }; - public: boolean; - created_at: string | null; - }; - /** Hypermedia Link with Type */ - "link-with-type": { - href: string; - type: string; - }; - /** Feed */ - feed: { - timeline_url: string; - user_url: string; - current_user_public_url?: string; - current_user_url?: string; - current_user_actor_url?: string; - current_user_organization_url?: string; - current_user_organization_urls?: string[]; - security_advisories_url?: string; - _links: { - timeline: components["schemas"]["link-with-type"]; - user: components["schemas"]["link-with-type"]; - security_advisories?: components["schemas"]["link-with-type"]; - current_user?: components["schemas"]["link-with-type"]; - current_user_public?: components["schemas"]["link-with-type"]; - current_user_actor?: components["schemas"]["link-with-type"]; - current_user_organization?: components["schemas"]["link-with-type"]; - current_user_organizations?: 
components["schemas"]["link-with-type"][]; - }; - }; - /** Base Gist */ - "base-gist": { - url: string; - forks_url: string; - commits_url: string; - id: string; - node_id: string; - git_pull_url: string; - git_push_url: string; - html_url: string; - files: { - [key: string]: { - filename?: string; - type?: string; - language?: string; - raw_url?: string; - size?: number; - }; - }; - public: boolean; - created_at: string; - updated_at: string; - description: string | null; - comments: number; - user: components["schemas"]["simple-user"] | null; - comments_url: string; - owner?: components["schemas"]["simple-user"] | null; - truncated?: boolean; - forks?: { - [key: string]: any; - }[]; - history?: { - [key: string]: any; - }[]; - }; - /** Gist Simple */ - "gist-simple": { - forks?: { - [key: string]: any; - }[] | null; - history?: { - [key: string]: any; - }[] | null; - /** Gist */ - fork_of?: { - url: string; - forks_url: string; - commits_url: string; - id: string; - node_id: string; - git_pull_url: string; - git_push_url: string; - html_url: string; - files: { - [key: string]: { - filename?: string; - type?: string; - language?: string; - raw_url?: string; - size?: number; - }; - }; - public: boolean; - created_at: string; - updated_at: string; - description: string | null; - comments: number; - user: components["schemas"]["simple-user"] | null; - comments_url: string; - owner?: components["schemas"]["simple-user"] | null; - truncated?: boolean; - forks?: { - [key: string]: any; - }[]; - history?: { - [key: string]: any; - }[]; - } | null; - url?: string; - forks_url?: string; - commits_url?: string; - id?: string; - node_id?: string; - git_pull_url?: string; - git_push_url?: string; - html_url?: string; - files?: { - [key: string]: { - filename?: string; - type?: string; - language?: string; - raw_url?: string; - size?: number; - truncated?: boolean; - content?: string; - } | null; - }; - public?: boolean; - created_at?: string; - updated_at?: string; - description?: string | null; - comments?: number; - user?: string | null; - comments_url?: string; - owner?: components["schemas"]["simple-user"]; - truncated?: boolean; - }; - /** A comment made to a gist. */ - "gist-comment": { - id: number; - node_id: string; - url: string; - /** The comment text. */ - body: string; - user: components["schemas"]["simple-user"] | null; - created_at: string; - updated_at: string; - author_association: components["schemas"]["author_association"]; - }; - /** Gist Commit */ - "gist-commit": { - url: string; - version: string; - user: components["schemas"]["simple-user"] | null; - change_status: { - total?: number; - additions?: number; - deletions?: number; - }; - committed_at: string; - }; - /** Gitignore Template */ - "gitignore-template": { - name: string; - source: string; - }; - /** Issues are a great way to keep track of tasks, enhancements, and bugs for your projects. 
*/ - issue: { - id: number; - node_id: string; - /** URL for the issue */ - url: string; - repository_url: string; - labels_url: string; - comments_url: string; - events_url: string; - html_url: string; - /** Number uniquely identifying the issue within its repository */ - number: number; - /** State of the issue; either 'open' or 'closed' */ - state: string; - /** Title of the issue */ - title: string; - /** Contents of the issue */ - body?: string | null; - user: components["schemas"]["simple-user"] | null; - /** Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository */ - labels: (string | { - id?: number; - node_id?: string; - url?: string; - name?: string; - description?: string | null; - color?: string | null; - default?: boolean; - })[]; - assignee: components["schemas"]["simple-user"] | null; - assignees?: components["schemas"]["simple-user"][] | null; - milestone: components["schemas"]["milestone"] | null; - locked: boolean; - active_lock_reason?: string | null; - comments: number; - pull_request?: { - merged_at?: string | null; - diff_url: string | null; - html_url: string | null; - patch_url: string | null; - url: string | null; - }; - closed_at: string | null; - created_at: string; - updated_at: string; - closed_by?: components["schemas"]["simple-user"] | null; - body_html?: string; - body_text?: string; - timeline_url?: string; - repository?: components["schemas"]["repository"]; - performed_via_github_app?: components["schemas"]["integration"] | null; - author_association: components["schemas"]["author_association"]; - reactions?: components["schemas"]["reaction-rollup"]; - }; - /** License */ - license: { - key: string; - name: string; - spdx_id: string | null; - url: string | null; - node_id: string; - html_url: string; - description: string; - implementation: string; - permissions: string[]; - conditions: string[]; - limitations: string[]; - body: string; - featured: boolean; - }; - /** Marketplace Listing Plan */ - "marketplace-listing-plan": { - url: string; - accounts_url: string; - id: number; - number: number; - name: string; - description: string; - monthly_price_in_cents: number; - yearly_price_in_cents: number; - price_model: string; - has_free_trial: boolean; - unit_name: string | null; - state: string; - bullets: string[]; - }; - /** Marketplace Purchase */ - "marketplace-purchase": { - url: string; - type: string; - id: number; - login: string; - organization_billing_email?: string; - email?: string | null; - marketplace_pending_change?: { - is_installed?: boolean; - effective_date?: string; - unit_count?: number | null; - id?: number; - plan?: components["schemas"]["marketplace-listing-plan"]; - } | null; - marketplace_purchase: { - billing_cycle?: string; - next_billing_date?: string | null; - is_installed?: boolean; - unit_count?: number | null; - on_free_trial?: boolean; - free_trial_ends_on?: string | null; - updated_at?: string; - plan?: components["schemas"]["marketplace-listing-plan"]; - }; - }; - /** Api Overview */ - "api-overview": { - verifiable_password_authentication: boolean; - ssh_key_fingerprints?: { - SHA256_RSA?: string; - SHA256_DSA?: string; - }; - hooks?: string[]; - web?: string[]; - api?: string[]; - git?: string[]; - packages?: string[]; - pages?: string[]; - importer?: string[]; - actions?: string[]; - dependabot?: string[]; - }; - /** 
Minimal Repository */ - "minimal-repository": { - id: number; - node_id: string; - name: string; - full_name: string; - owner: components["schemas"]["simple-user"] | null; - private: boolean; - html_url: string; - description: string | null; - fork: boolean; - url: string; - archive_url: string; - assignees_url: string; - blobs_url: string; - branches_url: string; - collaborators_url: string; - comments_url: string; - commits_url: string; - compare_url: string; - contents_url: string; - contributors_url: string; - deployments_url: string; - downloads_url: string; - events_url: string; - forks_url: string; - git_commits_url: string; - git_refs_url: string; - git_tags_url: string; - git_url?: string; - issue_comment_url: string; - issue_events_url: string; - issues_url: string; - keys_url: string; - labels_url: string; - languages_url: string; - merges_url: string; - milestones_url: string; - notifications_url: string; - pulls_url: string; - releases_url: string; - ssh_url?: string; - stargazers_url: string; - statuses_url: string; - subscribers_url: string; - subscription_url: string; - tags_url: string; - teams_url: string; - trees_url: string; - clone_url?: string; - mirror_url?: string | null; - hooks_url: string; - svn_url?: string; - homepage?: string | null; - language?: string | null; - forks_count?: number; - stargazers_count?: number; - watchers_count?: number; - size?: number; - default_branch?: string; - open_issues_count?: number; - is_template?: boolean; - topics?: string[]; - has_issues?: boolean; - has_projects?: boolean; - has_wiki?: boolean; - has_pages?: boolean; - has_downloads?: boolean; - archived?: boolean; - disabled?: boolean; - visibility?: string; - pushed_at?: string | null; - created_at?: string | null; - updated_at?: string | null; - permissions?: { - admin?: boolean; - push?: boolean; - pull?: boolean; - }; - template_repository?: components["schemas"]["repository"] | null; - temp_clone_token?: string; - delete_branch_on_merge?: boolean; - subscribers_count?: number; - network_count?: number; - license?: { - key?: string; - name?: string; - spdx_id?: string; - url?: string; - node_id?: string; - } | null; - forks?: number; - open_issues?: number; - watchers?: number; - }; - /** Thread */ - thread: { - id: string; - repository: components["schemas"]["minimal-repository"]; - subject: { - title: string; - url: string; - latest_comment_url: string; - type: string; - }; - reason: string; - unread: boolean; - updated_at: string; - last_read_at: string | null; - url: string; - subscription_url: string; - }; - /** Thread Subscription */ - "thread-subscription": { - subscribed: boolean; - ignored: boolean; - reason: string | null; - created_at: string | null; - url: string; - thread_url?: string; - repository_url?: string; - }; - /** Organization Full */ - "organization-full": { - login: string; - id: number; - node_id: string; - url: string; - repos_url: string; - events_url: string; - hooks_url: string; - issues_url: string; - members_url: string; - public_members_url: string; - avatar_url: string; - description: string | null; - name?: string; - company?: string; - blog?: string; - location?: string; - email?: string; - twitter_username?: string | null; - is_verified?: boolean; - has_organization_projects: boolean; - has_repository_projects: boolean; - public_repos: number; - public_gists: number; - followers: number; - following: number; - html_url: string; - created_at: string; - type: string; - total_private_repos?: number; - owned_private_repos?: number; - 
private_gists?: number | null; - disk_usage?: number | null; - collaborators?: number | null; - billing_email?: string | null; - plan?: { - name: string; - space: number; - private_repos: number; - filled_seats?: number; - seats?: number; - }; - default_repository_permission?: string | null; - members_can_create_repositories?: boolean | null; - two_factor_requirement_enabled?: boolean | null; - members_allowed_repository_creation_type?: string; - members_can_create_public_repositories?: boolean; - members_can_create_private_repositories?: boolean; - members_can_create_internal_repositories?: boolean; - members_can_create_pages?: boolean; - members_can_create_public_pages?: boolean; - members_can_create_private_pages?: boolean; - updated_at: string; - }; - /** The policy that controls the repositories in the organization that are allowed to run GitHub Actions. Can be one of: `all`, `none`, or `selected`. */ - "enabled-repositories": "all" | "none" | "selected"; - "actions-organization-permissions": { - enabled_repositories: components["schemas"]["enabled-repositories"]; - /** The API URL to use to get or set the selected repositories that are allowed to run GitHub Actions, when `enabled_repositories` is set to `selected`. */ - selected_repositories_url?: string; - allowed_actions?: components["schemas"]["allowed-actions"]; - selected_actions_url?: components["schemas"]["selected-actions-url"]; - }; - "runner-groups-org": { - id: number; - name: string; - visibility: string; - default: boolean; - /** Link to the selected repositories resource for this runner group. Not present unless visibility was set to `selected` */ - selected_repositories_url?: string; - runners_url: string; - inherited: boolean; - inherited_allows_public_repositories?: boolean; - allows_public_repositories: boolean; - }; - /** Secrets for GitHub Actions for an organization. */ - "organization-actions-secret": { - /** The name of the secret. */ - name: string; - created_at: string; - updated_at: string; - /** Visibility of a secret */ - visibility: "all" | "private" | "selected"; - selected_repositories_url?: string; - }; - /** The public key used for setting Actions Secrets. */ - "actions-public-key": { - /** The identifier for the key. */ - key_id: string; - /** The Base64 encoded public key. */ - key: string; - id?: number; - url?: string; - title?: string; - created_at?: string; - }; - /** Credential Authorization */ - "credential-authorization": { - /** User login that owns the underlying credential. */ - login: string; - /** Unique identifier for the credential. */ - credential_id: number; - /** Human-readable description of the credential type. */ - credential_type: string; - /** Last eight characters of the credential. Only included in responses with credential_type of personal access token. */ - token_last_eight?: string; - /** Date when the credential was authorized for use. */ - credential_authorized_at: string; - /** List of oauth scopes the token has been granted. */ - scopes?: string[]; - /** Unique string to distinguish the credential. Only included in responses with credential_type of SSH Key. */ - fingerprint?: string; - /** Date when the credential was last accessed. May be null if it was never accessed */ - credential_accessed_at?: string | null; - authorized_credential_id?: number | null; - /** The title given to the ssh key. This will only be present when the credential is an ssh key. */ - authorized_credential_title?: string | null; - /** The note given to the token. 
This will only be present when the credential is a token. */ - authorized_credential_note?: string | null; - }; - /** Organization Invitation */ - "organization-invitation": { - id: number; - login: string | null; - email: string | null; - role: string; - created_at: string; - failed_at?: string | null; - failed_reason?: string | null; - inviter: components["schemas"]["simple-user"]; - team_count: number; - node_id: string; - invitation_teams_url: string; - }; - /** Org Hook */ - "org-hook": { - id: number; - url: string; - ping_url: string; - name: string; - events: string[]; - active: boolean; - config: { - url?: string; - insecure_ssl?: string; - content_type?: string; - secret?: string; - }; - updated_at: string; - created_at: string; - type: string; - }; - /** The type of GitHub user that can comment, open issues, or create pull requests while the interaction limit is in effect. Can be one of: `existing_users`, `contributors_only`, `collaborators_only`. */ - "interaction-group": "existing_users" | "contributors_only" | "collaborators_only"; - /** Interaction limit settings. */ - "interaction-limit-response": { - limit: components["schemas"]["interaction-group"]; - origin: string; - expires_at: string; - }; - /** The duration of the interaction restriction. Can be one of: `one_day`, `three_days`, `one_week`, `one_month`, `six_months`. Default: `one_day`. */ - "interaction-expiry": "one_day" | "three_days" | "one_week" | "one_month" | "six_months"; - /** Limit interactions to a specific type of user for a specified duration */ - "interaction-limit": { - limit: components["schemas"]["interaction-group"]; - expiry?: components["schemas"]["interaction-expiry"]; - }; - /** Groups of organization members that gives permissions on specified repositories. */ - "team-simple": { - /** Unique identifier of the team */ - id: number; - node_id: string; - /** URL for the team */ - url: string; - members_url: string; - /** Name of the team */ - name: string; - /** Description of the team */ - description: string | null; - /** Permission that the team will have for its repositories */ - permission: string; - /** The level of privacy this team should have */ - privacy?: string; - html_url: string; - repositories_url: string; - slug: string; - /** Distinguished Name (DN) that team maps to within LDAP environment */ - ldap_dn?: string; - } | null; - /** Groups of organization members that gives permissions on specified repositories. */ - team: { - id: number; - node_id: string; - name: string; - slug: string; - description: string | null; - privacy?: string; - permission: string; - url: string; - html_url: string; - members_url: string; - repositories_url: string; - parent?: components["schemas"]["team-simple"] | null; - }; - /** Org Membership */ - "org-membership": { - url: string; - /** The state of the member in the organization. The `pending` state indicates the user has not yet accepted an invitation. */ - state: "active" | "pending"; - /** The user's membership type in the organization. */ - role: "admin" | "member" | "billing_manager"; - organization_url: string; - organization: components["schemas"]["organization-simple"]; - user: components["schemas"]["simple-user"] | null; - permissions?: { - can_create_repository: boolean; - }; - }; - /** A migration. 
*/ - migration: { - id: number; - owner: components["schemas"]["simple-user"] | null; - guid: string; - state: string; - lock_repositories: boolean; - exclude_attachments: boolean; - repositories: components["schemas"]["repository"][]; - url: string; - created_at: string; - updated_at: string; - node_id: string; - archive_url?: string; - exclude?: { - [key: string]: any; - }[]; - }; - /** A software package */ - package: { - /** Unique identifier of the package. */ - id: number; - /** The name of the package. */ - name: string; - package_type: "npm" | "maven" | "rubygems" | "docker" | "nuget" | "container"; - url: string; - html_url: string; - /** The number of versions of the package. */ - version_count: number; - visibility: "private" | "public"; - owner?: components["schemas"]["simple-user"] | null; - repository?: components["schemas"]["minimal-repository"] | null; - created_at: string; - updated_at: string; - }; - /** A version of a software package */ - "package-version": { - /** Unique identifier of the package version. */ - id: number; - /** The name of the package version. */ - name: string; - url: string; - package_html_url: string; - html_url?: string; - license?: string; - description?: string; - created_at: string; - updated_at: string; - deleted_at?: string; - metadata?: { - package_type: "npm" | "maven" | "rubygems" | "docker" | "nuget" | "container"; - container?: { - tags: any[]; - }; - docker?: { - tag?: any[]; - }; - }; - }; - /** Projects are a way to organize columns and cards of work. */ - project: { - owner_url: string; - url: string; - html_url: string; - columns_url: string; - id: number; - node_id: string; - /** Name of the project */ - name: string; - /** Body of the project */ - body: string | null; - number: number; - /** State of the project; either 'open' or 'closed' */ - state: string; - creator: components["schemas"]["simple-user"] | null; - created_at: string; - updated_at: string; - /** The baseline permission that all organization members have on this project. Only present if owner is an organization. */ - organization_permission?: "read" | "write" | "admin" | "none"; - /** Whether or not this project can be seen by everyone. Only present if owner is an organization. */ - private?: boolean; - }; - /** External Groups to be mapped to a team for membership */ - "group-mapping": { - /** Array of groups to be mapped to this team */ - groups?: { - /** The ID of the group */ - group_id: string; - /** The name of the group */ - group_name: string; - /** a description of the group */ - group_description: string; - /** synchronization status for this group mapping */ - status?: string; - /** the time of the last sync for this group-mapping */ - synced_at?: string | null; - }[]; - }; - /** Groups of organization members that gives permissions on specified repositories. 
*/ - "team-full": { - /** Unique identifier of the team */ - id: number; - node_id: string; - /** URL for the team */ - url: string; - html_url: string; - /** Name of the team */ - name: string; - slug: string; - description: string | null; - /** The level of privacy this team should have */ - privacy?: "closed" | "secret"; - /** Permission that the team will have for its repositories */ - permission: string; - members_url: string; - repositories_url: string; - parent?: components["schemas"]["team-simple"] | null; - members_count: number; - repos_count: number; - created_at: string; - updated_at: string; - organization: components["schemas"]["organization-full"]; - /** Distinguished Name (DN) that team maps to within LDAP environment */ - ldap_dn?: string; - }; - /** A team discussion is a persistent record of a free-form conversation within a team. */ - "team-discussion": { - author: components["schemas"]["simple-user"] | null; - /** The main text of the discussion. */ - body: string; - body_html: string; - /** The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. */ - body_version: string; - comments_count: number; - comments_url: string; - created_at: string; - last_edited_at: string | null; - html_url: string; - node_id: string; - /** The unique sequence number of a team discussion. */ - number: number; - /** Whether or not this discussion should be pinned for easy retrieval. */ - pinned: boolean; - /** Whether or not this discussion should be restricted to team members and organization administrators. */ - private: boolean; - team_url: string; - /** The title of the discussion. */ - title: string; - updated_at: string; - url: string; - reactions?: components["schemas"]["reaction-rollup"]; - }; - /** A reply to a discussion within a team. */ - "team-discussion-comment": { - author: components["schemas"]["simple-user"] | null; - /** The main text of the comment. */ - body: string; - body_html: string; - /** The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. */ - body_version: string; - created_at: string; - last_edited_at: string | null; - discussion_url: string; - html_url: string; - node_id: string; - /** The unique sequence number of a team discussion comment. */ - number: number; - updated_at: string; - url: string; - reactions?: components["schemas"]["reaction-rollup"]; - }; - /** Reactions to conversations provide a way to help people express their feelings more simply and effectively. */ - reaction: { - id: number; - node_id: string; - user: components["schemas"]["simple-user"] | null; - /** The reaction to use */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - created_at: string; - }; - /** Team Membership */ - "team-membership": { - url: string; - /** The role of the user in the team. */ - role: "member" | "maintainer"; - /** The state of the user's membership in the team. */ - state: "active" | "pending"; - }; - /** A team's access to a project. */ - "team-project": { - owner_url: string; - url: string; - html_url: string; - columns_url: string; - id: number; - node_id: string; - name: string; - body: string | null; - number: number; - state: string; - creator: components["schemas"]["simple-user"]; - created_at: string; - updated_at: string; - /** The organization permission for this project. 
Only present when owner is an organization. */ - organization_permission?: string; - /** Whether the project is private or not. Only present when owner is an organization. */ - private?: boolean; - permissions: { - read: boolean; - write: boolean; - admin: boolean; - }; - }; - /** A team's access to a repository. */ - "team-repository": { - /** Unique identifier of the repository */ - id: number; - node_id: string; - /** The name of the repository. */ - name: string; - full_name: string; - license: components["schemas"]["license-simple"] | null; - forks: number; - permissions?: { - admin: boolean; - pull: boolean; - triage?: boolean; - push: boolean; - maintain?: boolean; - }; - owner: components["schemas"]["simple-user"] | null; - /** Whether the repository is private or public. */ - private: boolean; - html_url: string; - description: string | null; - fork: boolean; - url: string; - archive_url: string; - assignees_url: string; - blobs_url: string; - branches_url: string; - collaborators_url: string; - comments_url: string; - commits_url: string; - compare_url: string; - contents_url: string; - contributors_url: string; - deployments_url: string; - downloads_url: string; - events_url: string; - forks_url: string; - git_commits_url: string; - git_refs_url: string; - git_tags_url: string; - git_url: string; - issue_comment_url: string; - issue_events_url: string; - issues_url: string; - keys_url: string; - labels_url: string; - languages_url: string; - merges_url: string; - milestones_url: string; - notifications_url: string; - pulls_url: string; - releases_url: string; - ssh_url: string; - stargazers_url: string; - statuses_url: string; - subscribers_url: string; - subscription_url: string; - tags_url: string; - teams_url: string; - trees_url: string; - clone_url: string; - mirror_url: string | null; - hooks_url: string; - svn_url: string; - homepage: string | null; - language: string | null; - forks_count: number; - stargazers_count: number; - watchers_count: number; - size: number; - /** The default branch of the repository. */ - default_branch: string; - open_issues_count: number; - /** Whether this repository acts as a template that can be used to generate new repositories. */ - is_template?: boolean; - topics?: string[]; - /** Whether issues are enabled. */ - has_issues: boolean; - /** Whether projects are enabled. */ - has_projects: boolean; - /** Whether the wiki is enabled. */ - has_wiki: boolean; - has_pages: boolean; - /** Whether downloads are enabled. */ - has_downloads: boolean; - /** Whether the repository is archived. */ - archived: boolean; - /** Returns whether or not this repository disabled. */ - disabled: boolean; - /** The repository visibility: public, private, or internal. */ - visibility?: string; - pushed_at: string | null; - created_at: string | null; - updated_at: string | null; - /** Whether to allow rebase merges for pull requests. */ - allow_rebase_merge?: boolean; - template_repository?: components["schemas"]["repository"] | null; - temp_clone_token?: string; - /** Whether to allow squash merges for pull requests. */ - allow_squash_merge?: boolean; - /** Whether to delete head branches when pull requests are merged */ - delete_branch_on_merge?: boolean; - /** Whether to allow merge commits for pull requests. */ - allow_merge_commit?: boolean; - subscribers_count?: number; - network_count?: number; - open_issues: number; - watchers: number; - master_branch?: string; - }; - /** Project cards represent a scope of work. 
*/ - "project-card": { - url: string; - /** The project card's ID */ - id: number; - node_id: string; - note: string | null; - creator: components["schemas"]["simple-user"] | null; - created_at: string; - updated_at: string; - /** Whether or not the card is archived */ - archived?: boolean; - column_url: string; - content_url?: string; - project_url: string; - }; - /** Project columns contain cards of work. */ - "project-column": { - url: string; - project_url: string; - cards_url: string; - /** The unique identifier of the project column */ - id: number; - node_id: string; - /** Name of the project column */ - name: string; - created_at: string; - updated_at: string; - }; - /** Repository Collaborator Permission */ - "repository-collaborator-permission": { - permission: string; - user: components["schemas"]["simple-user"] | null; - }; - "rate-limit": { - limit: number; - remaining: number; - reset: number; - }; - /** Rate Limit Overview */ - "rate-limit-overview": { - resources: { - core: components["schemas"]["rate-limit"]; - graphql?: components["schemas"]["rate-limit"]; - search: components["schemas"]["rate-limit"]; - source_import?: components["schemas"]["rate-limit"]; - integration_manifest?: components["schemas"]["rate-limit"]; - code_scanning_upload?: components["schemas"]["rate-limit"]; - }; - rate: components["schemas"]["rate-limit"]; - }; - /** Code of Conduct Simple */ - "code-of-conduct-simple": { - url: string; - key: string; - name: string; - html_url: string | null; - }; - /** Full Repository */ - "full-repository": { - id: number; - node_id: string; - name: string; - full_name: string; - owner: components["schemas"]["simple-user"] | null; - private: boolean; - html_url: string; - description: string | null; - fork: boolean; - url: string; - archive_url: string; - assignees_url: string; - blobs_url: string; - branches_url: string; - collaborators_url: string; - comments_url: string; - commits_url: string; - compare_url: string; - contents_url: string; - contributors_url: string; - deployments_url: string; - downloads_url: string; - events_url: string; - forks_url: string; - git_commits_url: string; - git_refs_url: string; - git_tags_url: string; - git_url: string; - issue_comment_url: string; - issue_events_url: string; - issues_url: string; - keys_url: string; - labels_url: string; - languages_url: string; - merges_url: string; - milestones_url: string; - notifications_url: string; - pulls_url: string; - releases_url: string; - ssh_url: string; - stargazers_url: string; - statuses_url: string; - subscribers_url: string; - subscription_url: string; - tags_url: string; - teams_url: string; - trees_url: string; - clone_url: string; - mirror_url: string | null; - hooks_url: string; - svn_url: string; - homepage: string | null; - language: string | null; - forks_count: number; - stargazers_count: number; - watchers_count: number; - size: number; - default_branch: string; - open_issues_count: number; - is_template?: boolean; - topics?: string[]; - has_issues: boolean; - has_projects: boolean; - has_wiki: boolean; - has_pages: boolean; - has_downloads: boolean; - archived: boolean; - /** Returns whether or not this repository disabled. */ - disabled: boolean; - /** The repository visibility: public, private, or internal. 
*/ - visibility?: string; - pushed_at: string; - created_at: string; - updated_at: string; - permissions?: { - admin: boolean; - pull: boolean; - push: boolean; - }; - allow_rebase_merge?: boolean; - template_repository?: components["schemas"]["repository"] | null; - temp_clone_token?: string | null; - allow_squash_merge?: boolean; - delete_branch_on_merge?: boolean; - allow_merge_commit?: boolean; - subscribers_count: number; - network_count: number; - license: components["schemas"]["license-simple"] | null; - organization?: components["schemas"]["simple-user"] | null; - parent?: components["schemas"]["repository"]; - source?: components["schemas"]["repository"]; - forks: number; - master_branch?: string; - open_issues: number; - watchers: number; - /** Whether anonymous git access is allowed. */ - anonymous_access_enabled?: boolean; - code_of_conduct?: components["schemas"]["code-of-conduct-simple"]; - }; - /** An artifact */ - artifact: { - id: number; - node_id: string; - /** The name of the artifact. */ - name: string; - /** The size in bytes of the artifact. */ - size_in_bytes: number; - url: string; - archive_download_url: string; - /** Whether or not the artifact has expired. */ - expired: boolean; - created_at: string | null; - expires_at: string | null; - updated_at: string | null; - }; - /** Information of a job execution in a workflow run */ - job: { - /** The id of the job. */ - id: number; - /** The id of the associated workflow run. */ - run_id: number; - run_url: string; - node_id: string; - /** The SHA of the commit that is being run. */ - head_sha: string; - url: string; - html_url: string | null; - /** The phase of the lifecycle that the job is currently in. */ - status: "queued" | "in_progress" | "completed"; - /** The outcome of the job. */ - conclusion: string | null; - /** The time that the job started, in ISO 8601 format. */ - started_at: string; - /** The time that the job finished, in ISO 8601 format. */ - completed_at: string | null; - /** The name of the job. */ - name: string; - /** Steps in this job. */ - steps?: { - /** The phase of the lifecycle that the job is currently in. */ - status: "queued" | "in_progress" | "completed"; - /** The outcome of the job. */ - conclusion: string | null; - /** The name of the job. */ - name: string; - number: number; - /** The time that the step started, in ISO 8601 format. */ - started_at?: string | null; - /** The time that the job finished, in ISO 8601 format. */ - completed_at?: string | null; - }[]; - check_run_url: string; - }; - /** Whether GitHub Actions is enabled on the repository. */ - "actions-enabled": boolean; - "actions-repository-permissions": { - enabled: components["schemas"]["actions-enabled"]; - allowed_actions: components["schemas"]["allowed-actions"]; - selected_actions_url?: components["schemas"]["selected-actions-url"]; - }; - "pull-request-minimal": { - id: number; - number: number; - url: string; - head: { - ref: string; - sha: string; - repo: { - id: number; - url: string; - name: string; - }; - }; - base: { - ref: string; - sha: string; - repo: { - id: number; - url: string; - name: string; - }; - }; - }; - /** Simple Commit */ - "simple-commit": { - id: string; - tree_id: string; - message: string; - timestamp: string; - author: { - name: string; - email: string; - } | null; - committer: { - name: string; - email: string; - } | null; - }; - /** An invocation of a workflow */ - "workflow-run": { - /** The ID of the workflow run. */ - id: number; - /** The name of the workflow run. 
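`workflow-run`, `job`, and `artifact` above are the payloads behind the Actions run endpoints. A hedged sketch of walking a repository's most recent run; owner and repo are placeholders.

import * as github from "@actions/github";

async function inspectLatestRun(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";      // placeholder owner
  const repo = "hello-world";   // placeholder repository
  const runs = await octokit.rest.actions.listWorkflowRunsForRepo({ owner, repo, per_page: 1 });
  const run = runs.data.workflow_runs[0];
  if (!run) return;
  // `job.steps` and `artifact.expired` come straight from the schemas above.
  const jobs = await octokit.rest.actions.listJobsForWorkflowRun({ owner, repo, run_id: run.id });
  const artifacts = await octokit.rest.actions.listWorkflowRunArtifacts({ owner, repo, run_id: run.id });
  console.log(`run #${run.run_number}: ${run.status}/${run.conclusion}`);
  console.log(`jobs: ${jobs.data.total_count}, artifacts: ${artifacts.data.total_count}`);
}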
*/ - name?: string | null; - node_id: string; - /** The ID of the associated check suite. */ - check_suite_id?: number; - /** The node ID of the associated check suite. */ - check_suite_node_id?: string; - head_branch: string | null; - /** The SHA of the head commit that points to the version of the worflow being run. */ - head_sha: string; - /** The auto incrementing run number for the workflow run. */ - run_number: number; - event: string; - status: string | null; - conclusion: string | null; - /** The ID of the parent workflow. */ - workflow_id: number; - /** The URL to the workflow run. */ - url: string; - html_url: string; - pull_requests: components["schemas"]["pull-request-minimal"][] | null; - created_at: string; - updated_at: string; - /** The URL to the jobs for the workflow run. */ - jobs_url: string; - /** The URL to download the logs for the workflow run. */ - logs_url: string; - /** The URL to the associated check suite. */ - check_suite_url: string; - /** The URL to the artifacts for the workflow run. */ - artifacts_url: string; - /** The URL to cancel the workflow run. */ - cancel_url: string; - /** The URL to rerun the workflow run. */ - rerun_url: string; - /** The URL to the workflow. */ - workflow_url: string; - head_commit: components["schemas"]["simple-commit"] | null; - repository: components["schemas"]["minimal-repository"]; - head_repository: components["schemas"]["minimal-repository"]; - head_repository_id?: number; - }; - /** An entry in the reviews log for environment deployments */ - "environment-approvals": { - /** The list of environments that were approved or rejected */ - environments: { - /** The id of the environment. */ - id?: number; - node_id?: string; - /** The name of the environment. */ - name?: string; - url?: string; - html_url?: string; - /** The time that the environment was created, in ISO 8601 format. */ - created_at?: string; - /** The time that the environment was last updated, in ISO 8601 format. */ - updated_at?: string; - }[]; - /** Whether deployment to the environment(s) was approved or rejected */ - state: "approved" | "rejected"; - user: components["schemas"]["simple-user"]; - /** The comment submitted with the deployment review */ - comment: string; - }; - /** The type of reviewer. Must be one of: `User` or `Team` */ - "deployment-reviewer-type": "User" | "Team"; - /** Details of a deployment that is waiting for protection rules to pass */ - "pending-deployment": { - environment: { - /** The id of the environment. */ - id?: number; - node_id?: string; - /** The name of the environment. */ - name?: string; - url?: string; - html_url?: string; - }; - /** The set duration of the wait timer */ - wait_timer: number; - /** The time that the wait timer began. */ - wait_timer_started_at: string | null; - /** Whether the currently authenticated user can approve the deployment */ - current_user_can_approve: boolean; - /** The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ - reviewers: { - type?: components["schemas"]["deployment-reviewer-type"]; - reviewer?: Partial & Partial; - }[]; - }; - /** A request for a specific ref(branch,sha,tag) to be deployed */ - deployment: { - url: string; - /** Unique identifier of the deployment */ - id: number; - node_id: string; - sha: string; - /** The ref to deploy. 
This can be a branch, tag, or sha. */ - ref: string; - /** Parameter to specify a task to execute */ - task: string; - payload: { - [key: string]: any; - } | string; - original_environment?: string; - /** Name for the target deployment environment. */ - environment: string; - description: string | null; - creator: components["schemas"]["simple-user"] | null; - created_at: string; - updated_at: string; - statuses_url: string; - repository_url: string; - /** Specifies if the given environment is will no longer exist at some point in the future. Default: false. */ - transient_environment?: boolean; - /** Specifies if the given environment is one that end-users directly interact with. Default: false. */ - production_environment?: boolean; - performed_via_github_app?: components["schemas"]["integration"] | null; - }; - /** Workflow Run Usage */ - "workflow-run-usage": { - billable: { - UBUNTU?: { - total_ms: number; - jobs: number; - }; - MACOS?: { - total_ms: number; - jobs: number; - }; - WINDOWS?: { - total_ms: number; - jobs: number; - }; - }; - run_duration_ms?: number; - }; - /** Set secrets for GitHub Actions. */ - "actions-secret": { - /** The name of the secret. */ - name: string; - created_at: string; - updated_at: string; - }; - /** A GitHub Actions workflow */ - workflow: { - id: number; - node_id: string; - name: string; - path: string; - state: "active" | "deleted" | "disabled_fork" | "disabled_inactivity" | "disabled_manually"; - created_at: string; - updated_at: string; - url: string; - html_url: string; - badge_url: string; - deleted_at?: string; - }; - /** Workflow Usage */ - "workflow-usage": { - billable: { - UBUNTU?: { - total_ms?: number; - }; - MACOS?: { - total_ms?: number; - }; - WINDOWS?: { - total_ms?: number; - }; - }; - }; - /** Protected Branch Admin Enforced */ - "protected-branch-admin-enforced": { - url: string; - enabled: boolean; - }; - /** Protected Branch Pull Request Review */ - "protected-branch-pull-request-review": { - url?: string; - dismissal_restrictions?: { - /** The list of users with review dismissal access. */ - users?: components["schemas"]["simple-user"][]; - /** The list of teams with review dismissal access. 
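The `deployment` schema above pairs with `deployment-status` further down. A sketch, under the same placeholder assumptions as the earlier examples, of creating a deployment and marking it successful; the 202 "auto-merged" response carries no `id`, so it is narrowed away first.

import * as github from "@actions/github";

async function deployRef(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";      // placeholder
  const repo = "hello-world";   // placeholder
  const created = await octokit.rest.repos.createDeployment({
    owner,
    repo,
    ref: "main",                // a branch, tag, or SHA, as the schema notes
    environment: "production",
    auto_merge: false,
    required_contexts: [],      // skip commit status checks for this sketch
  });
  if (!("id" in created.data)) return; // merged-base response, nothing to update
  await octokit.rest.repos.createDeploymentStatus({
    owner,
    repo,
    deployment_id: created.data.id,
    state: "success",
  });
}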
*/ - teams?: components["schemas"]["team"][]; - url?: string; - users_url?: string; - teams_url?: string; - }; - dismiss_stale_reviews: boolean; - require_code_owner_reviews: boolean; - required_approving_review_count?: number; - }; - /** Branch Restriction Policy */ - "branch-restriction-policy": { - url: string; - users_url: string; - teams_url: string; - apps_url: string; - users: { - login?: string; - id?: number; - node_id?: string; - avatar_url?: string; - gravatar_id?: string; - url?: string; - html_url?: string; - followers_url?: string; - following_url?: string; - gists_url?: string; - starred_url?: string; - subscriptions_url?: string; - organizations_url?: string; - repos_url?: string; - events_url?: string; - received_events_url?: string; - type?: string; - site_admin?: boolean; - }[]; - teams: { - id?: number; - node_id?: string; - url?: string; - html_url?: string; - name?: string; - slug?: string; - description?: string | null; - privacy?: string; - permission?: string; - members_url?: string; - repositories_url?: string; - parent?: string | null; - }[]; - apps: { - id?: number; - slug?: string; - node_id?: string; - owner?: { - login?: string; - id?: number; - node_id?: string; - url?: string; - repos_url?: string; - events_url?: string; - hooks_url?: string; - issues_url?: string; - members_url?: string; - public_members_url?: string; - avatar_url?: string; - description?: string; - gravatar_id?: string; - html_url?: string; - followers_url?: string; - following_url?: string; - gists_url?: string; - starred_url?: string; - subscriptions_url?: string; - organizations_url?: string; - received_events_url?: string; - type?: string; - }; - name?: string; - description?: string; - external_url?: string; - html_url?: string; - created_at?: string; - updated_at?: string; - permissions?: { - metadata?: string; - contents?: string; - issues?: string; - single_file?: string; - }; - events?: string[]; - }[]; - }; - /** Branch Protection */ - "branch-protection": { - url?: string; - required_status_checks?: { - url?: string; - enforcement_level?: string; - contexts: string[]; - contexts_url?: string; - }; - enforce_admins?: components["schemas"]["protected-branch-admin-enforced"]; - required_pull_request_reviews?: components["schemas"]["protected-branch-pull-request-review"]; - restrictions?: components["schemas"]["branch-restriction-policy"]; - required_linear_history?: { - enabled?: boolean; - }; - allow_force_pushes?: { - enabled?: boolean; - }; - allow_deletions?: { - enabled?: boolean; - }; - required_conversation_resolution?: { - enabled?: boolean; - }; - name?: string; - protection_url?: string; - required_signatures?: { - url: string; - enabled: boolean; - }; - }; - /** Short Branch */ - "short-branch": { - name: string; - commit: { - sha: string; - url: string; - }; - protected: boolean; - protection?: components["schemas"]["branch-protection"]; - protection_url?: string; - }; - /** Metaproperties for Git author/committer information. 
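`branch-protection` and `branch-restriction-policy` above are what the branch-protection endpoints return. A small hedged example of reading the review requirements for a placeholder branch:

import * as github from "@actions/github";

async function showProtection(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data: protection } = await octokit.rest.repos.getBranchProtection({
    owner: "octocat",     // placeholder
    repo: "hello-world",  // placeholder
    branch: "main",
  });
  // Every block in the branch-protection schema is optional, so guard each read.
  const reviews = protection.required_pull_request_reviews;
  console.log(`required approvals: ${reviews?.required_approving_review_count ?? 0}`);
  console.log(`enforce admins: ${protection.enforce_admins?.enabled ?? false}`);
  console.log(`linear history: ${protection.required_linear_history?.enabled ?? false}`);
}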
*/ - "git-user": { - name?: string; - email?: string; - date?: string; - }; - verification: { - verified: boolean; - reason: string; - payload: string | null; - signature: string | null; - }; - /** Commit */ - commit: { - url: string; - sha: string; - node_id: string; - html_url: string; - comments_url: string; - commit: { - url: string; - author: components["schemas"]["git-user"] | null; - committer: components["schemas"]["git-user"] | null; - message: string; - comment_count: number; - tree: { - sha: string; - url: string; - }; - verification?: components["schemas"]["verification"]; - }; - author: components["schemas"]["simple-user"] | null; - committer: components["schemas"]["simple-user"] | null; - parents: { - sha: string; - url: string; - html_url?: string; - }[]; - stats?: { - additions?: number; - deletions?: number; - total?: number; - }; - files?: { - filename?: string; - additions?: number; - deletions?: number; - changes?: number; - status?: string; - raw_url?: string; - blob_url?: string; - patch?: string; - sha?: string; - contents_url?: string; - previous_filename?: string; - }[]; - }; - /** Branch With Protection */ - "branch-with-protection": { - name: string; - commit: components["schemas"]["commit"]; - _links: { - html: string; - self: string; - }; - protected: boolean; - protection: components["schemas"]["branch-protection"]; - protection_url: string; - pattern?: string; - required_approving_review_count?: number; - }; - /** Status Check Policy */ - "status-check-policy": { - url: string; - strict: boolean; - contexts: string[]; - contexts_url: string; - }; - /** Branch protections protect branches */ - "protected-branch": { - url: string; - required_status_checks?: components["schemas"]["status-check-policy"]; - required_pull_request_reviews?: { - url: string; - dismiss_stale_reviews?: boolean; - require_code_owner_reviews?: boolean; - required_approving_review_count?: number; - dismissal_restrictions?: { - url: string; - users_url: string; - teams_url: string; - users: components["schemas"]["simple-user"][]; - teams: components["schemas"]["team"][]; - }; - }; - required_signatures?: { - url: string; - enabled: boolean; - }; - enforce_admins?: { - url: string; - enabled: boolean; - }; - required_linear_history?: { - enabled: boolean; - }; - allow_force_pushes?: { - enabled: boolean; - }; - allow_deletions?: { - enabled: boolean; - }; - restrictions?: components["schemas"]["branch-restriction-policy"]; - required_conversation_resolution?: { - enabled?: boolean; - }; - }; - /** A deployment created as the result of an Actions check run from a workflow that references an environment */ - "deployment-simple": { - url: string; - /** Unique identifier of the deployment */ - id: number; - node_id: string; - /** Parameter to specify a task to execute */ - task: string; - original_environment?: string; - /** Name for the target deployment environment. */ - environment: string; - description: string | null; - created_at: string; - updated_at: string; - statuses_url: string; - repository_url: string; - /** Specifies if the given environment is will no longer exist at some point in the future. Default: false. */ - transient_environment?: boolean; - /** Specifies if the given environment is one that end-users directly interact with. Default: false. */ - production_environment?: boolean; - performed_via_github_app?: components["schemas"]["integration"] | null; - }; - /** A check performed on the code of a given code change */ - "check-run": { - /** The id of the check. 
*/ - id: number; - /** The SHA of the commit that is being checked. */ - head_sha: string; - node_id: string; - external_id: string | null; - url: string; - html_url: string | null; - details_url: string | null; - /** The phase of the lifecycle that the check is currently in. */ - status: "queued" | "in_progress" | "completed"; - conclusion: ("success" | "failure" | "neutral" | "cancelled" | "skipped" | "timed_out" | "action_required") | null; - started_at: string | null; - completed_at: string | null; - output: { - title: string | null; - summary: string | null; - text: string | null; - annotations_count: number; - annotations_url: string; - }; - /** The name of the check. */ - name: string; - check_suite: { - id: number; - } | null; - app: components["schemas"]["integration"] | null; - pull_requests: components["schemas"]["pull-request-minimal"][]; - deployment?: components["schemas"]["deployment-simple"]; - }; - /** Check Annotation */ - "check-annotation": { - path: string; - start_line: number; - end_line: number; - start_column: number | null; - end_column: number | null; - annotation_level: string | null; - title: string | null; - message: string | null; - raw_details: string | null; - blob_href: string; - }; - /** A suite of checks performed on the code of a given code change */ - "check-suite": { - id: number; - node_id: string; - head_branch: string | null; - /** The SHA of the head commit that is being checked. */ - head_sha: string; - status: ("queued" | "in_progress" | "completed") | null; - conclusion: ("success" | "failure" | "neutral" | "cancelled" | "skipped" | "timed_out" | "action_required") | null; - url: string | null; - before: string | null; - after: string | null; - pull_requests: components["schemas"]["pull-request-minimal"][] | null; - app: components["schemas"]["integration"] | null; - repository: components["schemas"]["minimal-repository"]; - created_at: string | null; - updated_at: string | null; - head_commit: components["schemas"]["simple-commit"]; - latest_check_runs_count: number; - check_runs_url: string; - }; - /** Check suite configuration preferences for a repository. */ - "check-suite-preference": { - preferences: { - auto_trigger_checks?: { - app_id: number; - setting: boolean; - }[]; - }; - repository: components["schemas"]["minimal-repository"]; - }; - /** The name of the tool used to generate the code scanning analysis. */ - "code-scanning-analysis-tool-name": string; - /** The GUID of the tool used to generate the code scanning analysis, if provided in the uploaded SARIF data. */ - "code-scanning-analysis-tool-guid": string | null; - /** - * The full Git reference, formatted as `refs/heads/`, - * `refs/pull//merge`, or `refs/pull//head`. - */ - "code-scanning-ref": string; - /** State of a code scanning alert. */ - "code-scanning-alert-state": "open" | "closed" | "dismissed" | "fixed"; - /** The security alert number. */ - "alert-number": number; - /** The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ - "alert-created-at": string; - /** The REST API URL of the alert resource. */ - "alert-url": string; - /** The GitHub URL of the alert resource. */ - "alert-html-url": string; - /** The REST API URL for fetching the list of instances for an alert. */ - "alert-instances-url": string; - /** The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. 
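`check-run` and `check-suite` above back the checks endpoints. A sketch of summarising check runs for a commit; the ref is a placeholder.

import * as github from "@actions/github";

async function summariseChecks(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data } = await octokit.rest.checks.listForRef({
    owner: "octocat",     // placeholder
    repo: "hello-world",  // placeholder
    ref: "main",          // branch name or commit SHA
  });
  // `status` and `conclusion` use the same enums as the check-run schema above.
  const notSuccessful = data.check_runs.filter(
    (run) => run.status === "completed" && run.conclusion !== "success"
  );
  console.log(`${notSuccessful.length}/${data.total_count} completed check runs did not succeed`);
}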
*/ - "code-scanning-alert-dismissed-at": string | null; - /** **Required when the state is dismissed.** The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. */ - "code-scanning-alert-dismissed-reason": string | null; - "code-scanning-alert-rule-summary": { - /** A unique identifier for the rule used to detect the alert. */ - id?: string | null; - /** The name of the rule used to detect the alert. */ - name?: string; - /** The severity of the alert. */ - severity?: ("none" | "note" | "warning" | "error") | null; - /** A short description of the rule used to detect the alert. */ - description?: string; - }; - /** The version of the tool used to generate the code scanning analysis. */ - "code-scanning-analysis-tool-version": string | null; - "code-scanning-analysis-tool": { - name?: components["schemas"]["code-scanning-analysis-tool-name"]; - version?: components["schemas"]["code-scanning-analysis-tool-version"]; - guid?: components["schemas"]["code-scanning-analysis-tool-guid"]; - }; - /** Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ - "code-scanning-analysis-analysis-key": string; - /** Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ - "code-scanning-alert-environment": string; - /** Describe a region within a file for the alert. */ - "code-scanning-alert-location": { - path?: string; - start_line?: number; - end_line?: number; - start_column?: number; - end_column?: number; - }; - /** A classification of the file. For example to identify it as generated. */ - "code-scanning-alert-classification": ("source" | "generated" | "test" | "library") | null; - "code-scanning-alert-instance": { - ref?: components["schemas"]["code-scanning-ref"]; - analysis_key?: components["schemas"]["code-scanning-analysis-analysis-key"]; - environment?: components["schemas"]["code-scanning-alert-environment"]; - state?: components["schemas"]["code-scanning-alert-state"]; - commit_sha?: string; - message?: { - text?: string; - }; - location?: components["schemas"]["code-scanning-alert-location"]; - html_url?: string; - /** - * Classifications that have been applied to the file that triggered the alert. - * For example identifying it as documentation, or a generated file. - */ - classifications?: components["schemas"]["code-scanning-alert-classification"][]; - }; - "code-scanning-alert-items": { - number: components["schemas"]["alert-number"]; - created_at: components["schemas"]["alert-created-at"]; - url: components["schemas"]["alert-url"]; - html_url: components["schemas"]["alert-html-url"]; - instances_url: components["schemas"]["alert-instances-url"]; - state: components["schemas"]["code-scanning-alert-state"]; - dismissed_by: components["schemas"]["simple-user"]; - dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; - dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; - rule: components["schemas"]["code-scanning-alert-rule-summary"]; - tool: components["schemas"]["code-scanning-analysis-tool"]; - most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; - }; - "code-scanning-alert-rule": { - /** A unique identifier for the rule used to detect the alert. */ - id?: string | null; - /** The name of the rule used to detect the alert. 
*/ - name?: string; - /** The severity of the alert. */ - severity?: ("none" | "note" | "warning" | "error") | null; - /** A short description of the rule used to detect the alert. */ - description?: string; - /** description of the rule used to detect the alert. */ - full_description?: string; - /** A set of tags applicable for the rule. */ - tags?: string[] | null; - /** Detailed documentation for the rule as GitHub Flavored Markdown. */ - help?: string | null; - }; - "code-scanning-alert": { - number: components["schemas"]["alert-number"]; - created_at: components["schemas"]["alert-created-at"]; - url: components["schemas"]["alert-url"]; - html_url: components["schemas"]["alert-html-url"]; - instances?: { - [key: string]: any; - }; - instances_url: components["schemas"]["alert-instances-url"]; - state: components["schemas"]["code-scanning-alert-state"]; - dismissed_by: components["schemas"]["simple-user"]; - dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; - dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; - rule: components["schemas"]["code-scanning-alert-rule"]; - tool: components["schemas"]["code-scanning-analysis-tool"]; - most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; - }; - /** Sets the state of the code scanning alert. Can be one of `open` or `dismissed`. You must provide `dismissed_reason` when you set the state to `dismissed`. */ - "code-scanning-alert-set-state": "open" | "dismissed"; - /** An identifier for the upload. */ - "code-scanning-analysis-sarif-id": string; - /** The SHA of the commit to which the analysis you are uploading relates. */ - "code-scanning-analysis-commit-sha": string; - /** Identifies the variable values associated with the environment in which this analysis was performed. */ - "code-scanning-analysis-environment": string; - /** Identifies the configuration and environment under which the analysis was executed. */ - "code-scanning-analysis-category": string; - /** The time that the analysis was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ - "code-scanning-analysis-created-at": string; - /** The REST API URL of the analysis resource. */ - "code-scanning-analysis-url": string; - "code-scanning-analysis": { - ref: components["schemas"]["code-scanning-ref"]; - commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; - analysis_key: components["schemas"]["code-scanning-analysis-analysis-key"]; - environment: components["schemas"]["code-scanning-analysis-environment"]; - category?: components["schemas"]["code-scanning-analysis-category"]; - error: string; - created_at: components["schemas"]["code-scanning-analysis-created-at"]; - /** The total number of results in the analysis. */ - results_count: number; - /** The total number of rules used in the analysis. */ - rules_count: number; - /** Unique identifier for this analysis. 
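The code-scanning schemas above (`code-scanning-alert`, its rule and instance types, and the set-state / dismissed-reason strings) drive the alert endpoints. A hedged sketch of listing open alerts and dismissing one with a documented reason; repo names are placeholders.

import * as github from "@actions/github";

async function dismissFirstOpenAlert(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";      // placeholder
  const repo = "hello-world";   // placeholder
  const { data: alerts } = await octokit.rest.codeScanning.listAlertsForRepo({
    owner,
    repo,
    state: "open",
  });
  const alert = alerts[0];
  if (!alert) return;
  // `dismissed_reason` must be one of the values listed in the schema comment above.
  await octokit.rest.codeScanning.updateAlert({
    owner,
    repo,
    alert_number: alert.number,
    state: "dismissed",
    dismissed_reason: "won't fix",
  });
}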
*/ - id: number; - url: components["schemas"]["code-scanning-analysis-url"]; - sarif_id: components["schemas"]["code-scanning-analysis-sarif-id"]; - tool: components["schemas"]["code-scanning-analysis-tool"]; - deletable: boolean; - /** Warning generated when processing the analysis */ - warning: string; - }; - /** Successful deletion of a code scanning analysis */ - "code-scanning-analysis-deletion": { - /** Next deletable analysis in chain, without last analysis deletion confirmation */ - next_analysis_url: string | null; - /** Next deletable analysis in chain, with last analysis deletion confirmation */ - confirm_delete_url: string | null; - }; - /** Scim Error */ - "scim-error": { - message?: string | null; - documentation_url?: string | null; - detail?: string | null; - status?: number; - scimType?: string | null; - schemas?: string[]; - }; - /** A Base64 string representing the SARIF file to upload. You must first compress your SARIF file using [`gzip`](http://www.gnu.org/software/gzip/manual/gzip.html) and then translate the contents of the file into a Base64 encoding string. For more information, see "[SARIF support for code scanning](https://docs.github.com/github/finding-security-vulnerabilities-and-errors-in-your-code/sarif-support-for-code-scanning)." */ - "code-scanning-analysis-sarif-file": string; - "code-scanning-sarifs-receipt": { - id?: components["schemas"]["code-scanning-analysis-sarif-id"]; - /** The REST API URL for checking the status of the upload. */ - url?: string; - }; - "code-scanning-sarifs-status": { - /** `pending` files have not yet been processed, while `complete` means all results in the SARIF have been stored. */ - processing_status?: "pending" | "complete"; - /** The REST API URL for getting the analyses associated with the upload. */ - analyses_url?: string | null; - }; - /** Collaborator */ - collaborator: { - login: string; - id: number; - node_id: string; - avatar_url: string; - gravatar_id: string | null; - url: string; - html_url: string; - followers_url: string; - following_url: string; - gists_url: string; - starred_url: string; - subscriptions_url: string; - organizations_url: string; - repos_url: string; - events_url: string; - received_events_url: string; - type: string; - site_admin: boolean; - permissions?: { - pull: boolean; - push: boolean; - admin: boolean; - }; - }; - /** Repository invitations let you manage who you collaborate with. */ - "repository-invitation": { - /** Unique identifier of the repository invitation. */ - id: number; - repository: components["schemas"]["minimal-repository"]; - invitee: components["schemas"]["simple-user"] | null; - inviter: components["schemas"]["simple-user"] | null; - /** The permission associated with the invitation. 
*/ - permissions: "read" | "write" | "admin" | "triage" | "maintain"; - created_at: string; - /** Whether or not the invitation has expired */ - expired?: boolean; - /** URL for the repository invitation */ - url: string; - html_url: string; - node_id: string; - }; - /** Commit Comment */ - "commit-comment": { - html_url: string; - url: string; - id: number; - node_id: string; - body: string; - path: string | null; - position: number | null; - line: number | null; - commit_id: string; - user: components["schemas"]["simple-user"] | null; - created_at: string; - updated_at: string; - author_association: components["schemas"]["author_association"]; - reactions?: components["schemas"]["reaction-rollup"]; - }; - /** Branch Short */ - "branch-short": { - name: string; - commit: { - sha: string; - url: string; - }; - protected: boolean; - }; - /** Hypermedia Link */ - link: { - href: string; - }; - /** The status of auto merging a pull request. */ - auto_merge: { - enabled_by: components["schemas"]["simple-user"]; - /** The merge method to use. */ - merge_method: "merge" | "squash" | "rebase"; - /** Title for the merge commit message. */ - commit_title: string; - /** Commit message for the merge commit. */ - commit_message: string; - } | null; - /** Pull Request Simple */ - "pull-request-simple": { - url: string; - id: number; - node_id: string; - html_url: string; - diff_url: string; - patch_url: string; - issue_url: string; - commits_url: string; - review_comments_url: string; - review_comment_url: string; - comments_url: string; - statuses_url: string; - number: number; - state: string; - locked: boolean; - title: string; - user: components["schemas"]["simple-user"] | null; - body: string | null; - labels: { - id?: number; - node_id?: string; - url?: string; - name?: string; - description?: string; - color?: string; - default?: boolean; - }[]; - milestone: components["schemas"]["milestone"] | null; - active_lock_reason?: string | null; - created_at: string; - updated_at: string; - closed_at: string | null; - merged_at: string | null; - merge_commit_sha: string | null; - assignee: components["schemas"]["simple-user"] | null; - assignees?: components["schemas"]["simple-user"][] | null; - requested_reviewers?: components["schemas"]["simple-user"][] | null; - requested_teams?: components["schemas"]["team-simple"][] | null; - head: { - label: string; - ref: string; - repo: components["schemas"]["repository"]; - sha: string; - user: components["schemas"]["simple-user"] | null; - }; - base: { - label: string; - ref: string; - repo: components["schemas"]["repository"]; - sha: string; - user: components["schemas"]["simple-user"] | null; - }; - _links: { - comments: components["schemas"]["link"]; - commits: components["schemas"]["link"]; - statuses: components["schemas"]["link"]; - html: components["schemas"]["link"]; - issue: components["schemas"]["link"]; - review_comments: components["schemas"]["link"]; - review_comment: components["schemas"]["link"]; - self: components["schemas"]["link"]; - }; - author_association: components["schemas"]["author_association"]; - auto_merge: components["schemas"]["auto_merge"]; - /** Indicates whether or not the pull request is a draft. 
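`pull-request-simple` above is the list-item form of a pull request (the full `pull-request` schema appears later). A short placeholder-based sketch of listing open PRs:

import * as github from "@actions/github";

async function listOpenPulls(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data: pulls } = await octokit.rest.pulls.list({
    owner: "octocat",     // placeholder
    repo: "hello-world",  // placeholder
    state: "open",
    per_page: 20,
  });
  for (const pr of pulls) {
    // `draft` and `auto_merge` are the optional/nullable fields from the schema.
    const flags = [pr.draft ? "draft" : "", pr.auto_merge ? "auto-merge" : ""]
      .filter(Boolean)
      .join(", ");
    console.log(`#${pr.number} ${pr.title}${flags ? ` (${flags})` : ""}`);
  }
}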
*/ - draft?: boolean; - }; - "simple-commit-status": { - description: string | null; - id: number; - node_id: string; - state: string; - context: string; - target_url: string; - required?: boolean | null; - avatar_url: string | null; - url: string; - created_at: string; - updated_at: string; - }; - /** Combined Commit Status */ - "combined-commit-status": { - state: string; - statuses: components["schemas"]["simple-commit-status"][]; - sha: string; - total_count: number; - repository: components["schemas"]["minimal-repository"]; - commit_url: string; - url: string; - }; - /** The status of a commit. */ - status: { - url: string; - avatar_url: string | null; - id: number; - node_id: string; - state: string; - description: string; - target_url: string; - context: string; - created_at: string; - updated_at: string; - creator: components["schemas"]["simple-user"]; - }; - "community-health-file": { - url: string; - html_url: string; - }; - /** Community Profile */ - "community-profile": { - health_percentage: number; - description: string | null; - documentation: string | null; - files: { - code_of_conduct: components["schemas"]["code-of-conduct-simple"] | null; - license: components["schemas"]["license-simple"] | null; - contributing: components["schemas"]["community-health-file"] | null; - readme: components["schemas"]["community-health-file"] | null; - issue_template: components["schemas"]["community-health-file"] | null; - pull_request_template: components["schemas"]["community-health-file"] | null; - }; - updated_at: string | null; - content_reports_enabled?: boolean; - }; - /** Diff Entry */ - "diff-entry": { - sha: string; - filename: string; - status: string; - additions: number; - deletions: number; - changes: number; - blob_url: string; - raw_url: string; - contents_url: string; - patch?: string; - previous_filename?: string; - }; - /** Commit Comparison */ - "commit-comparison": { - url: string; - html_url: string; - permalink_url: string; - diff_url: string; - patch_url: string; - base_commit: components["schemas"]["commit"]; - merge_base_commit: components["schemas"]["commit"]; - status: "diverged" | "ahead" | "behind" | "identical"; - ahead_by: number; - behind_by: number; - total_commits: number; - commits: components["schemas"]["commit"][]; - files?: components["schemas"]["diff-entry"][]; - }; - /** Content Tree */ - "content-tree": { - type: string; - size: number; - name: string; - path: string; - sha: string; - url: string; - git_url: string | null; - html_url: string | null; - download_url: string | null; - entries?: { - type: string; - size: number; - name: string; - path: string; - content?: string; - sha: string; - url: string; - git_url: string | null; - html_url: string | null; - download_url: string | null; - _links: { - git: string | null; - html: string | null; - self: string; - }; - }[]; - _links: { - git: string | null; - html: string | null; - self: string; - }; - }; - /** A list of directory items */ - "content-directory": { - type: string; - size: number; - name: string; - path: string; - content?: string; - sha: string; - url: string; - git_url: string | null; - html_url: string | null; - download_url: string | null; - _links: { - git: string | null; - html: string | null; - self: string; - }; - }[]; - /** Content File */ - "content-file": { - type: string; - encoding: string; - size: number; - name: string; - path: string; - content: string; - sha: string; - url: string; - git_url: string | null; - html_url: string | null; - download_url: string | null; - 
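`commit-comparison` and `diff-entry` above are what the compare endpoint returns. A sketch with placeholder branch names:

import * as github from "@actions/github";

async function compareBranches(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data: cmp } = await octokit.rest.repos.compareCommits({
    owner: "octocat",         // placeholder
    repo: "hello-world",      // placeholder
    base: "main",
    head: "feature-branch",   // placeholder branch name
  });
  // `status`, `ahead_by`, `behind_by`, and `files` mirror the commit-comparison schema.
  console.log(`${cmp.status}: +${cmp.ahead_by} / -${cmp.behind_by}`);
  for (const file of cmp.files ?? []) {
    console.log(`${file.status} ${file.filename} (+${file.additions}/-${file.deletions})`);
  }
}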
_links: { - git: string | null; - html: string | null; - self: string; - }; - target?: string; - submodule_git_url?: string; - }; - /** An object describing a symlink */ - "content-symlink": { - type: string; - target: string; - size: number; - name: string; - path: string; - sha: string; - url: string; - git_url: string | null; - html_url: string | null; - download_url: string | null; - _links: { - git: string | null; - html: string | null; - self: string; - }; - }; - /** An object describing a symlink */ - "content-submodule": { - type: string; - submodule_git_url: string; - size: number; - name: string; - path: string; - sha: string; - url: string; - git_url: string | null; - html_url: string | null; - download_url: string | null; - _links: { - git: string | null; - html: string | null; - self: string; - }; - }; - /** File Commit */ - "file-commit": { - content: { - name?: string; - path?: string; - sha?: string; - size?: number; - url?: string; - html_url?: string; - git_url?: string; - download_url?: string; - type?: string; - _links?: { - self?: string; - git?: string; - html?: string; - }; - } | null; - commit: { - sha?: string; - node_id?: string; - url?: string; - html_url?: string; - author?: { - date?: string; - name?: string; - email?: string; - }; - committer?: { - date?: string; - name?: string; - email?: string; - }; - message?: string; - tree?: { - url?: string; - sha?: string; - }; - parents?: { - url?: string; - html_url?: string; - sha?: string; - }[]; - verification?: { - verified?: boolean; - reason?: string; - signature?: string | null; - payload?: string | null; - }; - }; - }; - /** Contributor */ - contributor: { - login?: string; - id?: number; - node_id?: string; - avatar_url?: string; - gravatar_id?: string | null; - url?: string; - html_url?: string; - followers_url?: string; - following_url?: string; - gists_url?: string; - starred_url?: string; - subscriptions_url?: string; - organizations_url?: string; - repos_url?: string; - events_url?: string; - received_events_url?: string; - type: string; - site_admin?: boolean; - contributions: number; - email?: string; - name?: string; - }; - /** The status of a deployment. */ - "deployment-status": { - url: string; - id: number; - node_id: string; - /** The state of the status. */ - state: "error" | "failure" | "inactive" | "pending" | "success" | "queued" | "in_progress"; - creator: components["schemas"]["simple-user"] | null; - /** A short description of the status. */ - description: string; - /** The environment of the deployment that the status is for. */ - environment?: string; - /** Deprecated: the URL to associate with this status. */ - target_url: string; - created_at: string; - updated_at: string; - deployment_url: string; - repository_url: string; - /** The URL for accessing your environment. */ - environment_url?: string; - /** The URL to associate with this status. */ - log_url?: string; - performed_via_github_app?: components["schemas"]["integration"] | null; - }; - /** The amount of time to delay a job after the job is initially triggered. The time (in minutes) must be an integer between 0 and 43,200 (30 days). */ - "wait-timer": number; - /** The type of deployment branch policy for this environment. To allow all branches to deploy, set to `null`. */ - deployment_branch_policy: { - /** Whether only branches with branch protection rules can deploy to this environment. 
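`content-file`, `content-directory`, and `file-commit` above come back from the contents endpoints, which return a union type, so the sketch below narrows before decoding. The path, repo names, and commit message are placeholders.

import * as github from "@actions/github";

async function readAndUpdateFile(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";      // placeholder
  const repo = "hello-world";   // placeholder
  const path = "build.json";    // placeholder path
  const { data } = await octokit.rest.repos.getContent({ owner, repo, path });
  // The response is content-file | content-directory | content-symlink | content-submodule.
  if (Array.isArray(data) || !("content" in data)) return;
  const text = Buffer.from(data.content, "base64").toString("utf8");
  // Writing back returns the file-commit schema (new blob plus commit metadata).
  const result = await octokit.rest.repos.createOrUpdateFileContents({
    owner,
    repo,
    path,
    message: "chore: update build.json",          // placeholder commit message
    content: Buffer.from(text).toString("base64"),
    sha: data.sha,                                // required when updating an existing file
  });
  console.log(`new commit: ${result.data.commit.sha ?? "(unknown)"}`);
}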
If `protected_branches` is `true`, `custom_branch_policies` must be `false`; if `protected_branches` is `false`, `custom_branch_policies` must be `true`. */ - protected_branches: boolean; - /** Whether only branches that match the specified name patterns can deploy to this environment. If `custom_branch_policies` is `true`, `protected_branches` must be `false`; if `custom_branch_policies` is `false`, `protected_branches` must be `true`. */ - custom_branch_policies: boolean; - } | null; - /** Details of a deployment environment */ - environment: { - /** The id of the environment. */ - id: number; - node_id: string; - /** The name of the environment. */ - name: string; - url: string; - html_url: string; - /** The time that the environment was created, in ISO 8601 format. */ - created_at: string; - /** The time that the environment was last updated, in ISO 8601 format. */ - updated_at: string; - protection_rules?: (Partial<{ - id: number; - node_id: string; - type: string; - wait_timer?: components["schemas"]["wait-timer"]; - }> & Partial<{ - id: number; - node_id: string; - type: string; - /** The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ - reviewers?: { - type?: components["schemas"]["deployment-reviewer-type"]; - reviewer?: Partial & Partial; - }[]; - }> & Partial<{ - id: number; - node_id: string; - type: string; - }>)[]; - deployment_branch_policy?: components["schemas"]["deployment_branch_policy"]; - }; - /** Short Blob */ - "short-blob": { - url: string; - sha: string; - }; - /** Blob */ - blob: { - content: string; - encoding: string; - url: string; - sha: string; - size: number | null; - node_id: string; - highlighted_content?: string; - }; - /** Low-level Git commit operations within a repository */ - "git-commit": { - /** SHA for the commit */ - sha: string; - node_id: string; - url: string; - /** Identifying information for the git-user */ - author: { - /** Timestamp of the commit */ - date: string; - /** Git email address of the user */ - email: string; - /** Name of the git user */ - name: string; - }; - /** Identifying information for the git-user */ - committer: { - /** Timestamp of the commit */ - date: string; - /** Git email address of the user */ - email: string; - /** Name of the git user */ - name: string; - }; - /** Message describing the purpose of the commit */ - message: string; - tree: { - /** SHA for the commit */ - sha: string; - url: string; - }; - parents: { - /** SHA for the commit */ - sha: string; - url: string; - html_url: string; - }[]; - verification: { - verified: boolean; - reason: string; - signature: string | null; - payload: string | null; - }; - html_url: string; - }; - /** Git references within a repository */ - "git-ref": { - ref: string; - node_id: string; - url: string; - object: { - type: string; - /** SHA for the reference */ - sha: string; - url: string; - }; - }; - /** Metadata for a Git tag */ - "git-tag": { - node_id: string; - /** Name of the tag */ - tag: string; - sha: string; - /** URL for the tag */ - url: string; - /** Message describing the purpose of the tag */ - message: string; - tagger: { - date: string; - email: string; - name: string; - }; - object: { - sha: string; - type: string; - url: string; - }; - verification?: components["schemas"]["verification"]; - }; - /** The hierarchy between files in a Git 
repository. */ - "git-tree": { - sha: string; - url: string; - truncated: boolean; - /** Objects specifying a tree structure */ - tree: { - path?: string; - mode?: string; - type?: string; - sha?: string; - size?: number; - url?: string; - }[]; - }; - "hook-response": { - code: number | null; - status: string | null; - message: string | null; - }; - /** Webhooks for repositories. */ - hook: { - type: string; - /** Unique identifier of the webhook. */ - id: number; - /** The name of a valid service, use 'web' for a webhook. */ - name: string; - /** Determines whether the hook is actually triggered on pushes. */ - active: boolean; - /** Determines what events the hook is triggered for. Default: ['push']. */ - events: string[]; - config: { - email?: string; - password?: string; - room?: string; - subdomain?: string; - url?: components["schemas"]["webhook-config-url"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - digest?: string; - secret?: components["schemas"]["webhook-config-secret"]; - token?: string; - }; - updated_at: string; - created_at: string; - url: string; - test_url: string; - ping_url: string; - last_response: components["schemas"]["hook-response"]; - }; - /** A repository import from an external source. */ - import: { - vcs: string | null; - use_lfs?: boolean; - /** The URL of the originating repository. */ - vcs_url: string; - svc_root?: string; - tfvc_project?: string; - status: "auth" | "error" | "none" | "detecting" | "choose" | "auth_failed" | "importing" | "mapping" | "waiting_to_push" | "pushing" | "complete" | "setup" | "unknown" | "detection_found_multiple" | "detection_found_nothing" | "detection_needs_auth"; - status_text?: string | null; - failed_step?: string | null; - error_message?: string | null; - import_percent?: number | null; - commit_count?: number | null; - push_percent?: number | null; - has_large_files?: boolean; - large_files_size?: number; - large_files_count?: number; - project_choices?: { - vcs?: string; - tfvc_project?: string; - human_name?: string; - }[]; - message?: string; - authors_count?: number | null; - url: string; - html_url: string; - authors_url: string; - repository_url: string; - svn_root?: string; - }; - /** Porter Author */ - "porter-author": { - id: number; - remote_id: string; - remote_name: string; - email: string; - name: string; - url: string; - import_url: string; - }; - /** Porter Large File */ - "porter-large-file": { - ref_name: string; - path: string; - oid: string; - size: number; - }; - /** Issue Event Label */ - "issue-event-label": { - name: string | null; - color: string | null; - }; - "issue-event-dismissed-review": { - state: string; - review_id: number; - dismissal_message: string | null; - dismissal_commit_id?: string | null; - }; - /** Issue Event Milestone */ - "issue-event-milestone": { - title: string; - }; - /** Issue Event Project Card */ - "issue-event-project-card": { - url: string; - id: number; - project_url: string; - project_id: number; - column_name: string; - previous_column_name?: string; - }; - /** Issue Event Rename */ - "issue-event-rename": { - from: string; - to: string; - }; - /** Issue Event */ - "issue-event": { - id: number; - node_id: string; - url: string; - actor: components["schemas"]["simple-user"] | null; - event: string; - commit_id: string | null; - commit_url: string | null; - created_at: string; - issue?: components["schemas"]["issue-simple"]; - label?: 
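`git-tree` above, together with the `git-commit`, `git-ref`, and `git-tag` schemas just before it, models the low-level Git data endpoints. A hedged sketch of resolving a branch and walking its tree recursively; owner, repo, and branch are placeholders.

import * as github from "@actions/github";

async function listTree(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";      // placeholder
  const repo = "hello-world";   // placeholder
  // Resolve the branch ref to a commit, then fetch that commit's tree recursively.
  const ref = await octokit.rest.git.getRef({ owner, repo, ref: "heads/main" });
  const commit = await octokit.rest.git.getCommit({ owner, repo, commit_sha: ref.data.object.sha });
  const tree = await octokit.rest.git.getTree({
    owner,
    repo,
    tree_sha: commit.data.tree.sha,
    recursive: "1",
  });
  if (tree.data.truncated) console.warn("tree listing was truncated");
  for (const entry of tree.data.tree) {
    console.log(`${entry.mode ?? "??????"} ${entry.type ?? "?"} ${entry.path ?? ""}`);
  }
}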
components["schemas"]["issue-event-label"]; - assignee?: components["schemas"]["simple-user"] | null; - assigner?: components["schemas"]["simple-user"] | null; - review_requester?: components["schemas"]["simple-user"] | null; - requested_reviewer?: components["schemas"]["simple-user"] | null; - requested_team?: components["schemas"]["team"]; - dismissed_review?: components["schemas"]["issue-event-dismissed-review"]; - milestone?: components["schemas"]["issue-event-milestone"]; - project_card?: components["schemas"]["issue-event-project-card"]; - rename?: components["schemas"]["issue-event-rename"]; - author_association?: components["schemas"]["author_association"]; - lock_reason?: string | null; - performed_via_github_app?: components["schemas"]["integration"] | null; - }; - /** Issue Event for Issue */ - "issue-event-for-issue": { - id?: number; - node_id?: string; - url?: string; - actor?: components["schemas"]["simple-user"]; - event?: string; - commit_id?: string | null; - commit_url?: string | null; - created_at?: string; - sha?: string; - html_url?: string; - message?: string; - issue_url?: string; - updated_at?: string; - author_association?: components["schemas"]["author_association"]; - body?: string | null; - lock_reason?: string | null; - submitted_at?: string; - state?: string; - pull_request_url?: string; - body_html?: string; - body_text?: string; - }; - /** An SSH key granting access to a single repository. */ - "deploy-key": { - id: number; - key: string; - url: string; - title: string; - verified: boolean; - created_at: string; - read_only: boolean; - }; - /** Language */ - language: { - [key: string]: number; - }; - /** License Content */ - "license-content": { - name: string; - path: string; - sha: string; - size: number; - url: string; - html_url: string | null; - git_url: string | null; - download_url: string | null; - type: string; - content: string; - encoding: string; - _links: { - git: string | null; - html: string | null; - self: string; - }; - license: components["schemas"]["license-simple"] | null; - }; - "pages-source-hash": { - branch: string; - path: string; - }; - "pages-https-certificate": { - state: "new" | "authorization_created" | "authorization_pending" | "authorized" | "authorization_revoked" | "issued" | "uploaded" | "approved" | "errored" | "bad_authz" | "destroy_pending" | "dns_changed"; - description: string; - /** Array of the domain set and its alternate name (if it is configured) */ - domains: any[]; - expires_at?: string; - }; - /** The configuration for GitHub Pages for a repository. */ - page: { - /** The API address for accessing this Page resource. */ - url: string; - /** The status of the most recent build of the Page. */ - status: ("built" | "building" | "errored") | null; - /** The Pages site's custom domain */ - cname: string | null; - /** Whether the Page has a custom 404 page. */ - custom_404: boolean; - /** The web address the Page can be accessed from. */ - html_url?: string; - source?: components["schemas"]["pages-source-hash"]; - /** Whether the GitHub Pages site is publicly visible. If set to `true`, the site is accessible to anyone on the internet. If set to `false`, the site will only be accessible to users who have at least `read` access to the repository that published the site. 
*/ - public: boolean; - https_certificate?: components["schemas"]["pages-https-certificate"]; - /** Whether https is enabled on the domain */ - https_enforced?: boolean; - }; - /** Page Build */ - "page-build": { - url: string; - status: string; - error: { - message: string | null; - }; - pusher: components["schemas"]["simple-user"] | null; - commit: string; - duration: number; - created_at: string; - updated_at: string; - }; - /** Page Build Status */ - "page-build-status": { - url: string; - status: string; - }; - /** Pages Health Check Status */ - "pages-health-check": { - domain?: { - host?: string; - uri?: string; - nameservers?: string; - dns_resolves?: boolean; - is_proxied?: boolean | null; - is_cloudflare_ip?: boolean | null; - is_fastly_ip?: boolean | null; - is_old_ip_address?: boolean | null; - is_a_record?: boolean | null; - has_cname_record?: boolean | null; - has_mx_records_present?: boolean | null; - is_valid_domain?: boolean; - is_apex_domain?: boolean; - should_be_a_record?: boolean | null; - is_cname_to_github_user_domain?: boolean | null; - is_cname_to_pages_dot_github_dot_com?: boolean | null; - is_cname_to_fastly?: boolean | null; - is_pointed_to_github_pages_ip?: boolean | null; - is_non_github_pages_ip_present?: boolean | null; - is_pages_domain?: boolean; - is_served_by_pages?: boolean | null; - is_valid?: boolean; - reason?: string | null; - responds_to_https?: boolean; - enforces_https?: boolean; - https_error?: string | null; - is_https_eligible?: boolean | null; - caa_error?: string | null; - }; - alt_domain?: { - host?: string; - uri?: string; - nameservers?: string; - dns_resolves?: boolean; - is_proxied?: boolean | null; - is_cloudflare_ip?: boolean | null; - is_fastly_ip?: boolean | null; - is_old_ip_address?: boolean | null; - is_a_record?: boolean | null; - has_cname_record?: boolean | null; - has_mx_records_present?: boolean | null; - is_valid_domain?: boolean; - is_apex_domain?: boolean; - should_be_a_record?: boolean | null; - is_cname_to_github_user_domain?: boolean | null; - is_cname_to_pages_dot_github_dot_com?: boolean | null; - is_cname_to_fastly?: boolean | null; - is_pointed_to_github_pages_ip?: boolean | null; - is_non_github_pages_ip_present?: boolean | null; - is_pages_domain?: boolean; - is_served_by_pages?: boolean | null; - is_valid?: boolean; - reason?: string | null; - responds_to_https?: boolean; - enforces_https?: boolean; - https_error?: string | null; - is_https_eligible?: boolean | null; - caa_error?: string | null; - } | null; - }; - /** Pull requests let you tell others about changes you've pushed to a repository on GitHub. Once a pull request is sent, interested parties can review the set of changes, discuss potential modifications, and even push follow-up commits if necessary. */ - "pull-request": { - url: string; - id: number; - node_id: string; - html_url: string; - diff_url: string; - patch_url: string; - issue_url: string; - commits_url: string; - review_comments_url: string; - review_comment_url: string; - comments_url: string; - statuses_url: string; - /** Number uniquely identifying the pull request within its repository. */ - number: number; - /** State of this Pull Request. Either `open` or `closed`. */ - state: "open" | "closed"; - locked: boolean; - /** The title of the pull request. 
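`page`, `page-build`, and `pages-health-check` above describe the GitHub Pages endpoints. A small placeholder sketch of reading the site configuration and its most recent builds:

import * as github from "@actions/github";

async function inspectPages(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const owner = "octocat";      // placeholder
  const repo = "hello-world";   // placeholder
  const site = await octokit.rest.repos.getPages({ owner, repo });
  // `status`, `cname`, and `https_enforced` are fields of the page schema above.
  console.log(`status=${site.data.status} cname=${site.data.cname ?? "none"}`);
  console.log(`https enforced: ${site.data.https_enforced ?? false}`);
  const builds = await octokit.rest.repos.listPagesBuilds({ owner, repo, per_page: 3 });
  for (const build of builds.data) {
    console.log(`${build.created_at} ${build.status} (${build.duration}ms)`);
  }
}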
*/ - title: string; - user: components["schemas"]["simple-user"] | null; - body: string | null; - labels: { - id?: number; - node_id?: string; - url?: string; - name?: string; - description?: string | null; - color?: string; - default?: boolean; - }[]; - milestone: components["schemas"]["milestone"] | null; - active_lock_reason?: string | null; - created_at: string; - updated_at: string; - closed_at: string | null; - merged_at: string | null; - merge_commit_sha: string | null; - assignee: components["schemas"]["simple-user"] | null; - assignees?: components["schemas"]["simple-user"][] | null; - requested_reviewers?: components["schemas"]["simple-user"][] | null; - requested_teams?: components["schemas"]["team-simple"][] | null; - head: { - label: string; - ref: string; - repo: { - archive_url: string; - assignees_url: string; - blobs_url: string; - branches_url: string; - collaborators_url: string; - comments_url: string; - commits_url: string; - compare_url: string; - contents_url: string; - contributors_url: string; - deployments_url: string; - description: string | null; - downloads_url: string; - events_url: string; - fork: boolean; - forks_url: string; - full_name: string; - git_commits_url: string; - git_refs_url: string; - git_tags_url: string; - hooks_url: string; - html_url: string; - id: number; - node_id: string; - issue_comment_url: string; - issue_events_url: string; - issues_url: string; - keys_url: string; - labels_url: string; - languages_url: string; - merges_url: string; - milestones_url: string; - name: string; - notifications_url: string; - owner: { - avatar_url: string; - events_url: string; - followers_url: string; - following_url: string; - gists_url: string; - gravatar_id: string | null; - html_url: string; - id: number; - node_id: string; - login: string; - organizations_url: string; - received_events_url: string; - repos_url: string; - site_admin: boolean; - starred_url: string; - subscriptions_url: string; - type: string; - url: string; - }; - private: boolean; - pulls_url: string; - releases_url: string; - stargazers_url: string; - statuses_url: string; - subscribers_url: string; - subscription_url: string; - tags_url: string; - teams_url: string; - trees_url: string; - url: string; - clone_url: string; - default_branch: string; - forks: number; - forks_count: number; - git_url: string; - has_downloads: boolean; - has_issues: boolean; - has_projects: boolean; - has_wiki: boolean; - has_pages: boolean; - homepage: string | null; - language: string | null; - master_branch?: string; - archived: boolean; - disabled: boolean; - mirror_url: string | null; - open_issues: number; - open_issues_count: number; - permissions?: { - admin: boolean; - pull: boolean; - push: boolean; - }; - temp_clone_token?: string; - allow_merge_commit?: boolean; - allow_squash_merge?: boolean; - allow_rebase_merge?: boolean; - license: { - key: string; - name: string; - url: string | null; - spdx_id: string | null; - node_id: string; - } | null; - pushed_at: string; - size: number; - ssh_url: string; - stargazers_count: number; - svn_url: string; - topics?: string[]; - watchers: number; - watchers_count: number; - created_at: string; - updated_at: string; - }; - sha: string; - user: { - avatar_url: string; - events_url: string; - followers_url: string; - following_url: string; - gists_url: string; - gravatar_id: string | null; - html_url: string; - id: number; - node_id: string; - login: string; - organizations_url: string; - received_events_url: string; - repos_url: string; - site_admin: 
boolean; - starred_url: string; - subscriptions_url: string; - type: string; - url: string; - }; - }; - base: { - label: string; - ref: string; - repo: { - archive_url: string; - assignees_url: string; - blobs_url: string; - branches_url: string; - collaborators_url: string; - comments_url: string; - commits_url: string; - compare_url: string; - contents_url: string; - contributors_url: string; - deployments_url: string; - description: string | null; - downloads_url: string; - events_url: string; - fork: boolean; - forks_url: string; - full_name: string; - git_commits_url: string; - git_refs_url: string; - git_tags_url: string; - hooks_url: string; - html_url: string; - id: number; - node_id: string; - issue_comment_url: string; - issue_events_url: string; - issues_url: string; - keys_url: string; - labels_url: string; - languages_url: string; - merges_url: string; - milestones_url: string; - name: string; - notifications_url: string; - owner: { - avatar_url: string; - events_url: string; - followers_url: string; - following_url: string; - gists_url: string; - gravatar_id: string | null; - html_url: string; - id: number; - node_id: string; - login: string; - organizations_url: string; - received_events_url: string; - repos_url: string; - site_admin: boolean; - starred_url: string; - subscriptions_url: string; - type: string; - url: string; - }; - private: boolean; - pulls_url: string; - releases_url: string; - stargazers_url: string; - statuses_url: string; - subscribers_url: string; - subscription_url: string; - tags_url: string; - teams_url: string; - trees_url: string; - url: string; - clone_url: string; - default_branch: string; - forks: number; - forks_count: number; - git_url: string; - has_downloads: boolean; - has_issues: boolean; - has_projects: boolean; - has_wiki: boolean; - has_pages: boolean; - homepage: string | null; - language: string | null; - master_branch?: string; - archived: boolean; - disabled: boolean; - mirror_url: string | null; - open_issues: number; - open_issues_count: number; - permissions?: { - admin: boolean; - pull: boolean; - push: boolean; - }; - temp_clone_token?: string; - allow_merge_commit?: boolean; - allow_squash_merge?: boolean; - allow_rebase_merge?: boolean; - license: components["schemas"]["license-simple"] | null; - pushed_at: string; - size: number; - ssh_url: string; - stargazers_count: number; - svn_url: string; - topics?: string[]; - watchers: number; - watchers_count: number; - created_at: string; - updated_at: string; - }; - sha: string; - user: { - avatar_url: string; - events_url: string; - followers_url: string; - following_url: string; - gists_url: string; - gravatar_id: string | null; - html_url: string; - id: number; - node_id: string; - login: string; - organizations_url: string; - received_events_url: string; - repos_url: string; - site_admin: boolean; - starred_url: string; - subscriptions_url: string; - type: string; - url: string; - }; - }; - _links: { - comments: components["schemas"]["link"]; - commits: components["schemas"]["link"]; - statuses: components["schemas"]["link"]; - html: components["schemas"]["link"]; - issue: components["schemas"]["link"]; - review_comments: components["schemas"]["link"]; - review_comment: components["schemas"]["link"]; - self: components["schemas"]["link"]; - }; - author_association: components["schemas"]["author_association"]; - auto_merge: components["schemas"]["auto_merge"]; - /** Indicates whether or not the pull request is a draft. 
*/ - draft?: boolean; - merged: boolean; - mergeable: boolean | null; - rebaseable?: boolean | null; - mergeable_state: string; - merged_by: components["schemas"]["simple-user"] | null; - comments: number; - review_comments: number; - /** Indicates whether maintainers can modify the pull request. */ - maintainer_can_modify: boolean; - commits: number; - additions: number; - deletions: number; - changed_files: number; - }; - /** Pull Request Review Comments are comments on a portion of the Pull Request's diff. */ - "pull-request-review-comment": { - /** URL for the pull request review comment */ - url: string; - /** The ID of the pull request review to which the comment belongs. */ - pull_request_review_id: number | null; - /** The ID of the pull request review comment. */ - id: number; - /** The node ID of the pull request review comment. */ - node_id: string; - /** The diff of the line that the comment refers to. */ - diff_hunk: string; - /** The relative path of the file to which the comment applies. */ - path: string; - /** The line index in the diff to which the comment applies. */ - position: number; - /** The index of the original line in the diff to which the comment applies. */ - original_position: number; - /** The SHA of the commit to which the comment applies. */ - commit_id: string; - /** The SHA of the original commit to which the comment applies. */ - original_commit_id: string; - /** The comment ID to reply to. */ - in_reply_to_id?: number; - user: components["schemas"]["simple-user"]; - /** The text of the comment. */ - body: string; - created_at: string; - updated_at: string; - /** HTML URL for the pull request review comment. */ - html_url: string; - /** URL for the pull request that the review comment belongs to. */ - pull_request_url: string; - author_association: components["schemas"]["author_association"]; - _links: { - self: { - href: string; - }; - html: { - href: string; - }; - pull_request: { - href: string; - }; - }; - /** The first line of the range for a multi-line comment. */ - start_line?: number | null; - /** The first line of the range for a multi-line comment. */ - original_start_line?: number | null; - /** The side of the first line of the range for a multi-line comment. */ - start_side?: ("LEFT" | "RIGHT") | null; - /** The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ - line?: number; - /** The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ - original_line?: number; - /** The side of the diff to which the comment applies. The side of the last line of the range for a multi-line comment */ - side?: "LEFT" | "RIGHT"; - reactions?: components["schemas"]["reaction-rollup"]; - body_html?: string; - body_text?: string; - }; - /** Pull Request Merge Result */ - "pull-request-merge-result": { - sha: string; - merged: boolean; - message: string; - }; - /** Pull Request Review Request */ - "pull-request-review-request": { - users: components["schemas"]["simple-user"][]; - teams: components["schemas"]["team-simple"][]; - }; - /** Pull Request Reviews are reviews on pull requests. */ - "pull-request-review": { - /** Unique identifier of the review */ - id: number; - node_id: string; - user: components["schemas"]["simple-user"] | null; - /** The text of the review. 
*/ - body: string; - state: string; - html_url: string; - pull_request_url: string; - _links: { - html: { - href: string; - }; - pull_request: { - href: string; - }; - }; - submitted_at?: string; - /** A commit SHA for the review. */ - commit_id: string; - body_html?: string; - body_text?: string; - author_association: components["schemas"]["author_association"]; - }; - /** Legacy Review Comment */ - "review-comment": { - url: string; - pull_request_review_id: number | null; - id: number; - node_id: string; - diff_hunk: string; - path: string; - position: number | null; - original_position: number; - commit_id: string; - original_commit_id: string; - in_reply_to_id?: number; - user: components["schemas"]["simple-user"] | null; - body: string; - created_at: string; - updated_at: string; - html_url: string; - pull_request_url: string; - author_association: components["schemas"]["author_association"]; - _links: { - self: components["schemas"]["link"]; - html: components["schemas"]["link"]; - pull_request: components["schemas"]["link"]; - }; - body_text?: string; - body_html?: string; - /** The side of the first line of the range for a multi-line comment. */ - side?: "LEFT" | "RIGHT"; - /** The side of the first line of the range for a multi-line comment. */ - start_side?: ("LEFT" | "RIGHT") | null; - /** The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ - line?: number; - /** The original line of the blob to which the comment applies. The last line of the range for a multi-line comment */ - original_line?: number; - /** The first line of the range for a multi-line comment. */ - start_line?: number | null; - /** The original first line of the range for a multi-line comment. */ - original_start_line?: number | null; - }; - /** Data related to a release. */ - "release-asset": { - url: string; - browser_download_url: string; - id: number; - node_id: string; - /** The file name of the asset. */ - name: string; - label: string | null; - /** State of the release asset. */ - state: "uploaded" | "open"; - content_type: string; - size: number; - download_count: number; - created_at: string; - updated_at: string; - uploader: components["schemas"]["simple-user"] | null; - }; - /** A release. */ - release: { - url: string; - html_url: string; - assets_url: string; - upload_url: string; - tarball_url: string | null; - zipball_url: string | null; - id: number; - node_id: string; - /** The name of the tag. */ - tag_name: string; - /** Specifies the commitish value that determines where the Git tag is created from. */ - target_commitish: string; - name: string | null; - body?: string | null; - /** true to create a draft (unpublished) release, false to create a published one. */ - draft: boolean; - /** Whether to identify the release as a prerelease or a full release. */ - prerelease: boolean; - created_at: string; - published_at: string | null; - author: components["schemas"]["simple-user"]; - assets: components["schemas"]["release-asset"][]; - body_html?: string; - body_text?: string; - /** The URL of the release discussion. */ - discussion_url?: string; - }; - /** Sets the state of the secret scanning alert. Can be either `open` or `resolved`. You must provide `resolution` when you set the state to `resolved`. */ - "secret-scanning-alert-state": "open" | "resolved"; - /** **Required when the `state` is `resolved`.** The reason for resolving the alert. Can be one of `false_positive`, `wont_fix`, `revoked`, or `used_in_tests`. 
*/ - "secret-scanning-alert-resolution": string | null; - "secret-scanning-alert": { - number?: components["schemas"]["alert-number"]; - created_at?: components["schemas"]["alert-created-at"]; - url?: components["schemas"]["alert-url"]; - html_url?: components["schemas"]["alert-html-url"]; - state?: components["schemas"]["secret-scanning-alert-state"]; - resolution?: components["schemas"]["secret-scanning-alert-resolution"]; - /** The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ - resolved_at?: string | null; - resolved_by?: components["schemas"]["simple-user"]; - /** The type of secret that secret scanning detected. */ - secret_type?: string; - /** The secret that was detected. */ - secret?: string; - }; - /** Stargazer */ - stargazer: { - starred_at: string; - user: components["schemas"]["simple-user"] | null; - }; - /** Code Frequency Stat */ - "code-frequency-stat": number[]; - /** Commit Activity */ - "commit-activity": { - days: number[]; - total: number; - week: number; - }; - /** Contributor Activity */ - "contributor-activity": { - author: components["schemas"]["simple-user"] | null; - total: number; - weeks: { - w?: number; - a?: number; - d?: number; - c?: number; - }[]; - }; - "participation-stats": { - all: number[]; - owner: number[]; - }; - /** Repository invitations let you manage who you collaborate with. */ - "repository-subscription": { - /** Determines if notifications should be received from this repository. */ - subscribed: boolean; - /** Determines if all notifications should be blocked from this repository. */ - ignored: boolean; - reason: string | null; - created_at: string; - url: string; - repository_url: string; - }; - /** Tag */ - tag: { - name: string; - commit: { - sha: string; - url: string; - }; - zipball_url: string; - tarball_url: string; - node_id: string; - }; - /** A topic aggregates entities that are related to a subject. 
*/ - topic: { - names: string[]; - }; - traffic: { - timestamp: string; - uniques: number; - count: number; - }; - /** Clone Traffic */ - "clone-traffic": { - count: number; - uniques: number; - clones: components["schemas"]["traffic"][]; - }; - /** Content Traffic */ - "content-traffic": { - path: string; - title: string; - count: number; - uniques: number; - }; - /** Referrer Traffic */ - "referrer-traffic": { - referrer: string; - count: number; - uniques: number; - }; - /** View Traffic */ - "view-traffic": { - count: number; - uniques: number; - views: components["schemas"]["traffic"][]; - }; - "scim-group-list-enterprise": { - schemas: string[]; - totalResults: number; - itemsPerPage: number; - startIndex: number; - Resources: { - schemas: string[]; - id: string; - externalId?: string | null; - displayName?: string; - members?: { - value?: string; - $ref?: string; - display?: string; - }[]; - meta?: { - resourceType?: string; - created?: string; - lastModified?: string; - location?: string; - }; - }[]; - }; - "scim-enterprise-group": { - schemas: string[]; - id: string; - externalId?: string | null; - displayName?: string; - members?: { - value?: string; - $ref?: string; - display?: string; - }[]; - meta?: { - resourceType?: string; - created?: string; - lastModified?: string; - location?: string; - }; - }; - "scim-user-list-enterprise": { - schemas: string[]; - totalResults: number; - itemsPerPage: number; - startIndex: number; - Resources: { - schemas: string[]; - id: string; - externalId?: string; - userName?: string; - name?: { - givenName?: string; - familyName?: string; - }; - emails?: { - value?: string; - primary?: boolean; - type?: string; - }[]; - groups?: { - value?: string; - }[]; - active?: boolean; - meta?: { - resourceType?: string; - created?: string; - lastModified?: string; - location?: string; - }; - }[]; - }; - "scim-enterprise-user": { - schemas: string[]; - id: string; - externalId?: string; - userName?: string; - name?: { - givenName?: string; - familyName?: string; - }; - emails?: { - value?: string; - type?: string; - primary?: boolean; - }[]; - groups?: { - value?: string; - }[]; - active?: boolean; - meta?: { - resourceType?: string; - created?: string; - lastModified?: string; - location?: string; - }; - }; - /** SCIM /Users provisioning endpoints */ - "scim-user": { - /** SCIM schema used. */ - schemas: string[]; - /** Unique identifier of an external identity */ - id: string; - /** The ID of the User. */ - externalId: string | null; - /** Configured by the admin. Could be an email, login, or username */ - userName: string | null; - /** The name of the user, suitable for display to end-users */ - displayName?: string | null; - name: { - givenName: string | null; - familyName: string | null; - formatted?: string | null; - }; - /** user emails */ - emails: { - value: string; - primary?: boolean; - }[]; - /** The active status of the User. */ - active: boolean; - meta: { - resourceType?: string; - created?: string; - lastModified?: string; - location?: string; - }; - /** The ID of the organization. */ - organization_id?: number; - /** Set of operations to be performed */ - operations?: { - op: "add" | "remove" | "replace"; - path?: string; - value?: string | { - [key: string]: any; - } | { - [key: string]: any; - }[]; - }[]; - /** associated groups */ - groups?: { - value?: string; - display?: string; - }[]; - }; - /** SCIM User List */ - "scim-user-list": { - /** SCIM schema used. 
*/ - schemas: string[]; - totalResults: number; - itemsPerPage: number; - startIndex: number; - Resources: components["schemas"]["scim-user"][]; - }; - "search-result-text-matches": { - object_url?: string; - object_type?: string | null; - property?: string; - fragment?: string; - matches?: { - text?: string; - indices?: number[]; - }[]; - }[]; - /** Code Search Result Item */ - "code-search-result-item": { - name: string; - path: string; - sha: string; - url: string; - git_url: string; - html_url: string; - repository: components["schemas"]["minimal-repository"]; - score: number; - file_size?: number; - language?: string | null; - last_modified_at?: string; - line_numbers?: string[]; - text_matches?: components["schemas"]["search-result-text-matches"]; - }; - /** Commit Search Result Item */ - "commit-search-result-item": { - url: string; - sha: string; - html_url: string; - comments_url: string; - commit: { - author: { - name: string; - email: string; - date: string; - }; - committer: components["schemas"]["git-user"] | null; - comment_count: number; - message: string; - tree: { - sha: string; - url: string; - }; - url: string; - verification?: components["schemas"]["verification"]; - }; - author: components["schemas"]["simple-user"] | null; - committer: components["schemas"]["git-user"] | null; - parents: { - url?: string; - html_url?: string; - sha?: string; - }[]; - repository: components["schemas"]["minimal-repository"]; - score: number; - node_id: string; - text_matches?: components["schemas"]["search-result-text-matches"]; - }; - /** Issue Search Result Item */ - "issue-search-result-item": { - url: string; - repository_url: string; - labels_url: string; - comments_url: string; - events_url: string; - html_url: string; - id: number; - node_id: string; - number: number; - title: string; - locked: boolean; - active_lock_reason?: string | null; - assignees?: components["schemas"]["simple-user"][] | null; - user: components["schemas"]["simple-user"] | null; - labels: { - id?: number; - node_id?: string; - url?: string; - name?: string; - color?: string; - default?: boolean; - description?: string | null; - }[]; - state: string; - assignee: components["schemas"]["simple-user"] | null; - milestone: components["schemas"]["milestone"] | null; - comments: number; - created_at: string; - updated_at: string; - closed_at: string | null; - text_matches?: components["schemas"]["search-result-text-matches"]; - pull_request?: { - merged_at?: string | null; - diff_url: string | null; - html_url: string | null; - patch_url: string | null; - url: string | null; - }; - body?: string; - score: number; - author_association: components["schemas"]["author_association"]; - draft?: boolean; - repository?: components["schemas"]["repository"]; - body_html?: string; - body_text?: string; - timeline_url?: string; - performed_via_github_app?: components["schemas"]["integration"] | null; - }; - /** Label Search Result Item */ - "label-search-result-item": { - id: number; - node_id: string; - url: string; - name: string; - color: string; - default: boolean; - description: string | null; - score: number; - text_matches?: components["schemas"]["search-result-text-matches"]; - }; - /** Repo Search Result Item */ - "repo-search-result-item": { - id: number; - node_id: string; - name: string; - full_name: string; - owner: components["schemas"]["simple-user"] | null; - private: boolean; - html_url: string; - description: string | null; - fork: boolean; - url: string; - created_at: string; - updated_at: string; - 
pushed_at: string; - homepage: string | null; - size: number; - stargazers_count: number; - watchers_count: number; - language: string | null; - forks_count: number; - open_issues_count: number; - master_branch?: string; - default_branch: string; - score: number; - forks_url: string; - keys_url: string; - collaborators_url: string; - teams_url: string; - hooks_url: string; - issue_events_url: string; - events_url: string; - assignees_url: string; - branches_url: string; - tags_url: string; - blobs_url: string; - git_tags_url: string; - git_refs_url: string; - trees_url: string; - statuses_url: string; - languages_url: string; - stargazers_url: string; - contributors_url: string; - subscribers_url: string; - subscription_url: string; - commits_url: string; - git_commits_url: string; - comments_url: string; - issue_comment_url: string; - contents_url: string; - compare_url: string; - merges_url: string; - archive_url: string; - downloads_url: string; - issues_url: string; - pulls_url: string; - milestones_url: string; - notifications_url: string; - labels_url: string; - releases_url: string; - deployments_url: string; - git_url: string; - ssh_url: string; - clone_url: string; - svn_url: string; - forks: number; - open_issues: number; - watchers: number; - topics?: string[]; - mirror_url: string | null; - has_issues: boolean; - has_projects: boolean; - has_pages: boolean; - has_wiki: boolean; - has_downloads: boolean; - archived: boolean; - /** Returns whether or not this repository disabled. */ - disabled: boolean; - license: components["schemas"]["license-simple"] | null; - permissions?: { - admin: boolean; - pull: boolean; - push: boolean; - }; - text_matches?: components["schemas"]["search-result-text-matches"]; - temp_clone_token?: string; - allow_merge_commit?: boolean; - allow_squash_merge?: boolean; - allow_rebase_merge?: boolean; - delete_branch_on_merge?: boolean; - }; - /** Topic Search Result Item */ - "topic-search-result-item": { - name: string; - display_name: string | null; - short_description: string | null; - description: string | null; - created_by: string | null; - released: string | null; - created_at: string; - updated_at: string; - featured: boolean; - curated: boolean; - score: number; - repository_count?: number | null; - logo_url?: string | null; - text_matches?: components["schemas"]["search-result-text-matches"]; - related?: { - topic_relation?: { - id?: number; - name?: string; - topic_id?: number; - relation_type?: string; - }; - }[] | null; - aliases?: { - topic_relation?: { - id?: number; - name?: string; - topic_id?: number; - relation_type?: string; - }; - }[] | null; - }; - /** User Search Result Item */ - "user-search-result-item": { - login: string; - id: number; - node_id: string; - avatar_url: string; - gravatar_id: string | null; - url: string; - html_url: string; - followers_url: string; - subscriptions_url: string; - organizations_url: string; - repos_url: string; - received_events_url: string; - type: string; - score: number; - following_url: string; - gists_url: string; - starred_url: string; - events_url: string; - public_repos?: number; - public_gists?: number; - followers?: number; - following?: number; - created_at?: string; - updated_at?: string; - name?: string | null; - bio?: string | null; - email?: string | null; - location?: string | null; - site_admin: boolean; - hireable?: boolean | null; - text_matches?: components["schemas"]["search-result-text-matches"]; - blog?: string | null; - company?: string | null; - suspended_at?: string | 
null; - }; - /** Private User */ - "private-user": { - login: string; - id: number; - node_id: string; - avatar_url: string; - gravatar_id: string | null; - url: string; - html_url: string; - followers_url: string; - following_url: string; - gists_url: string; - starred_url: string; - subscriptions_url: string; - organizations_url: string; - repos_url: string; - events_url: string; - received_events_url: string; - type: string; - site_admin: boolean; - name: string | null; - company: string | null; - blog: string | null; - location: string | null; - email: string | null; - hireable: boolean | null; - bio: string | null; - twitter_username?: string | null; - public_repos: number; - public_gists: number; - followers: number; - following: number; - created_at: string; - updated_at: string; - private_gists: number; - total_private_repos: number; - owned_private_repos: number; - disk_usage: number; - collaborators: number; - two_factor_authentication: boolean; - plan?: { - collaborators: number; - name: string; - space: number; - private_repos: number; - }; - suspended_at?: string | null; - business_plus?: boolean; - ldap_dn?: string; - }; - /** Public User */ - "public-user": { - login: string; - id: number; - node_id: string; - avatar_url: string; - gravatar_id: string | null; - url: string; - html_url: string; - followers_url: string; - following_url: string; - gists_url: string; - starred_url: string; - subscriptions_url: string; - organizations_url: string; - repos_url: string; - events_url: string; - received_events_url: string; - type: string; - site_admin: boolean; - name: string | null; - company: string | null; - blog: string | null; - location: string | null; - email: string | null; - hireable: boolean | null; - bio: string | null; - twitter_username?: string | null; - public_repos: number; - public_gists: number; - followers: number; - following: number; - created_at: string; - updated_at: string; - plan?: { - collaborators: number; - name: string; - space: number; - private_repos: number; - }; - suspended_at?: string | null; - private_gists?: number; - total_private_repos?: number; - owned_private_repos?: number; - disk_usage?: number; - collaborators?: number; - }; - /** Email */ - email: { - email: string; - primary: boolean; - verified: boolean; - visibility: string | null; - }; - /** A unique encryption key */ - "gpg-key": { - id: number; - primary_key_id: number | null; - key_id: string; - public_key: string; - emails: { - email?: string; - verified?: boolean; - }[]; - subkeys: { - id?: number; - primary_key_id?: number; - key_id?: string; - public_key?: string; - emails?: { - [key: string]: any; - }[]; - subkeys?: { - [key: string]: any; - }[]; - can_sign?: boolean; - can_encrypt_comms?: boolean; - can_encrypt_storage?: boolean; - can_certify?: boolean; - created_at?: string; - expires_at?: string | null; - raw_key?: string | null; - }[]; - can_sign: boolean; - can_encrypt_comms: boolean; - can_encrypt_storage: boolean; - can_certify: boolean; - created_at: string; - expires_at: string | null; - raw_key: string | null; - }; - /** Key */ - key: { - key: string; - id: number; - url: string; - title: string; - created_at: string; - verified: boolean; - read_only: boolean; - }; - "marketplace-account": { - url: string; - id: number; - type: string; - node_id?: string; - login: string; - email?: string | null; - organization_billing_email?: string | null; - }; - /** User Marketplace Purchase */ - "user-marketplace-purchase": { - billing_cycle: string; - next_billing_date: string | 
null; - unit_count: number | null; - on_free_trial: boolean; - free_trial_ends_on: string | null; - updated_at: string | null; - account: components["schemas"]["marketplace-account"]; - plan: components["schemas"]["marketplace-listing-plan"]; - }; - /** Starred Repository */ - "starred-repository": { - starred_at: string; - repo: components["schemas"]["repository"]; - }; - /** Hovercard */ - hovercard: { - contexts: { - message: string; - octicon: string; - }[]; - }; - /** Key Simple */ - "key-simple": { - id: number; - key: string; - }; - }; - responses: { - /** Resource not found */ - not_found: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Validation failed */ - validation_failed_simple: { - content: { - "application/json": components["schemas"]["validation-error-simple"]; - }; - }; - /** Preview header missing */ - preview_header_missing: { - content: { - "application/json": { - message: string; - documentation_url: string; - }; - }; - }; - /** Forbidden */ - forbidden: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Requires authentication */ - requires_authentication: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Validation failed */ - validation_failed: { - content: { - "application/json": components["schemas"]["validation-error"]; - }; - }; - /** Not modified */ - not_modified: unknown; - /** Gone */ - gone: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Service unavailable */ - service_unavailable: { - content: { - "application/json": { - code?: string; - message?: string; - documentation_url?: string; - }; - }; - }; - /** Forbidden Gist */ - forbidden_gist: { - content: { - "application/json": { - block?: { - reason?: string; - created_at?: string; - html_url?: string | null; - }; - message?: string; - documentation_url?: string; - }; - }; - }; - /** Moved permanently */ - moved_permanently: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Conflict */ - conflict: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Response if GitHub Advanced Security is not enabled for this repository */ - code_scanning_forbidden_read: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Response if the repository is archived or if github advanced security is not enabled for this repository */ - code_scanning_forbidden_write: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Bad Request */ - bad_request: { - content: { - "application/json": components["schemas"]["basic-error"]; - "application/scim+json": components["schemas"]["scim-error"]; - }; - }; - /** Internal Error */ - internal_error: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - /** Found */ - found: unknown; - /** Accepted */ - accepted: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - /** A header with no content is returned. 
*/ - no_content: unknown; - /** Resource not found */ - scim_not_found: { - content: { - "application/json": components["schemas"]["scim-error"]; - "application/scim+json": components["schemas"]["scim-error"]; - }; - }; - /** Forbidden */ - scim_forbidden: { - content: { - "application/json": components["schemas"]["scim-error"]; - "application/scim+json": components["schemas"]["scim-error"]; - }; - }; - /** Bad Request */ - scim_bad_request: { - content: { - "application/json": components["schemas"]["scim-error"]; - "application/scim+json": components["schemas"]["scim-error"]; - }; - }; - /** Internal Error */ - scim_internal_error: { - content: { - "application/json": components["schemas"]["scim-error"]; - "application/scim+json": components["schemas"]["scim-error"]; - }; - }; - /** Conflict */ - scim_conflict: { - content: { - "application/json": components["schemas"]["scim-error"]; - "application/scim+json": components["schemas"]["scim-error"]; - }; - }; - }; - parameters: { - /** Results per page (max 100). */ - per_page: number; - /** Page number of the results to fetch. */ - page: number; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since: string; - /** installation_id parameter */ - installation_id: number; - /** grant_id parameter */ - grant_id: number; - /** The client ID of your GitHub app. */ - "client-id": string; - "access-token": string; - app_slug: string; - /** authorization_id parameter */ - authorization_id: number; - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: string; - /** Unique identifier of an organization. */ - org_id: number; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: number; - /** Unique identifier of the self-hosted runner. */ - runner_id: number; - /** A search phrase. For more information, see [Searching the audit log](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#searching-the-audit-log). */ - "audit-log-phrase": string; - /** - * The event types to include: - * - * - `web` - returns web (non-Git) events - * - `git` - returns Git events - * - `all` - returns both web and Git events - * - * The default is `web`. - */ - "audit-log-include": "web" | "git" | "all"; - /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ - "audit-log-after": string; - /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ - "audit-log-before": string; - /** - * The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`. - * - * The default is `desc`. - */ - "audit-log-order": "desc" | "asc"; - /** gist_id parameter */ - gist_id: string; - /** comment_id parameter */ - comment_id: number; - /** A list of comma separated label names. Example: `bug,ui,@high` */ - labels: string; - /** One of `asc` (ascending) or `desc` (descending). 
*/ - direction: "asc" | "desc"; - /** account_id parameter */ - account_id: number; - /** plan_id parameter */ - plan_id: number; - /** One of `created` (when the repository was starred) or `updated` (when it was last pushed to). */ - sort: "created" | "updated"; - owner: string; - repo: string; - /** If `true`, show notifications marked as read. */ - all: boolean; - /** If `true`, only shows notifications in which the user is directly participating or mentioned. */ - participating: boolean; - /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - before: string; - /** thread_id parameter */ - thread_id: number; - /** An organization ID. Only return organizations with an ID greater than this ID. */ - "since-org": number; - org: string; - repository_id: number; - /** secret_name parameter */ - secret_name: string; - username: string; - "hook-id": number; - /** invitation_id parameter */ - invitation_id: number; - /** migration_id parameter */ - migration_id: number; - /** repo_name parameter */ - repo_name: string; - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: "npm" | "maven" | "rubygems" | "docker" | "nuget" | "container"; - /** The name of the package. */ - package_name: string; - /** Unique identifier of the package version. */ - package_version_id: number; - /** team_slug parameter */ - team_slug: string; - "discussion-number": number; - "comment-number": number; - "reaction-id": number; - "project-id": number; - /** card_id parameter */ - card_id: number; - /** column_id parameter */ - column_id: number; - /** artifact_id parameter */ - artifact_id: number; - /** job_id parameter */ - job_id: number; - /** Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ - actor: string; - /** Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ - "workflow-run-branch": string; - /** Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ - event: string; - /** Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. For a list of the possible `status` and `conclusion` options, see "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)." */ - "workflow-run-status": "completed" | "action_required" | "cancelled" | "failure" | "neutral" | "skipped" | "stale" | "success" | "timed_out" | "in_progress" | "queued" | "requested" | "waiting"; - /** The id of the workflow run */ - "run-id": number; - /** The ID of the workflow. You can also pass the workflow file name as a string. */ - "workflow-id": number | string; - /** The name of the branch. */ - branch: string; - /** check_run_id parameter */ - check_run_id: number; - /** check_suite_id parameter */ - check_suite_id: number; - /** Returns check runs with the specified `name`. 
*/ - check_name: string; - /** Returns check runs with the specified `status`. Can be one of `queued`, `in_progress`, or `completed`. */ - status: "queued" | "in_progress" | "completed"; - /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ - tool_name: components["schemas"]["code-scanning-analysis-tool-name"]; - /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ - tool_guid: components["schemas"]["code-scanning-analysis-tool-guid"]; - /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ - git_ref: components["schemas"]["code-scanning-ref"]; - /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ - alert_number: components["schemas"]["alert-number"]; - /** commit_sha parameter */ - commit_sha: string; - /** Results per page (max 100) */ - "per-page": number; - /** deployment_id parameter */ - deployment_id: number; - /** The name of the environment */ - environment_name: string; - /** A user ID. Only return users with an ID greater than this ID. */ - "since-user": number; - /** issue_number parameter */ - issue_number: number; - /** key_id parameter */ - key_id: number; - /** milestone_number parameter */ - milestone_number: number; - "pull-number": number; - /** review_id parameter */ - review_id: number; - /** asset_id parameter */ - asset_id: number; - /** release_id parameter */ - release_id: number; - /** Must be one of: `day`, `week`. */ - per: "" | "day" | "week"; - /** A repository ID. Only return repositories with an ID greater than this ID. */ - "since-repo": number; - /** Used for pagination: the index of the first result to return. */ - start_index: number; - /** Used for pagination: the number of results to return. */ - count: number; - /** Identifier generated by the GitHub SCIM endpoint. */ - scim_group_id: string; - /** scim_user_id parameter */ - scim_user_id: string; - /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. 
*/ - order: "desc" | "asc"; - "team-id": number; - /** gpg_key_id parameter */ - gpg_key_id: number; - }; - headers: { - link?: string; - "content-type"?: string; - "x-common-marker-version"?: string; - "x-rate-limit-limit"?: number; - "x-rate-limit-remaining"?: number; - "x-rate-limit-reset"?: number; - location?: string; - }; -} -export interface operations { - /** Get Hypermedia links to resources accessible in GitHub's REST API */ - "meta/root": { - responses: { - /** Response */ - 200: { - content: { - "application/json": { - current_user_url: string; - current_user_authorizations_html_url: string; - authorizations_url: string; - code_search_url: string; - commit_search_url: string; - emails_url: string; - emojis_url: string; - events_url: string; - feeds_url: string; - followers_url: string; - following_url: string; - gists_url: string; - hub_url: string; - issue_search_url: string; - issues_url: string; - keys_url: string; - label_search_url: string; - notifications_url: string; - organization_url: string; - organization_repositories_url: string; - organization_teams_url: string; - public_gists_url: string; - rate_limit_url: string; - repository_url: string; - repository_search_url: string; - current_user_repositories_url: string; - starred_url: string; - starred_gists_url: string; - topic_search_url?: string; - user_url: string; - user_organizations_url: string; - user_repositories_url: string; - user_search_url: string; - }; - }; - }; - }; - }; - /** - * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/get-authenticated": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["integration"]; - }; - }; - }; - }; - /** Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. */ - "apps/create-from-manifest": { - parameters: { - path: { - code: string; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["integration"] & ({ - client_id: string; - client_secret: string; - webhook_secret: string | null; - pem: string; - } & { - [key: string]: any; - }); - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - }; - /** - * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
- */ - "apps/get-webhook-config-for-app": { - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["webhook-config"]; - }; - }; - }; - }; - /** - * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/update-webhook-config-for-app": { - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["webhook-config"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - url?: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - }; - }; - }; - }; - /** - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - * - * The permissions the installation has are included under the `permissions` key. - */ - "apps/list-installations": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - outdated?: string; - }; - }; - responses: { - /** The permissions the installation has are included under the `permissions` key. */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["installation"][]; - }; - }; - }; - }; - /** - * Enables an authenticated GitHub App to find an installation's information using the installation id. The installation's account type (`target_type`) will be either an organization or a user account, depending which account the repository belongs to. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/get-installation": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["installation"]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
- */ - "apps/delete-installation": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/create-installation-access-token": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["installation-token"]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** List of repository names that the token should have access to */ - repositories?: string[]; - /** List of repository IDs that the token should have access to */ - repository_ids?: number[]; - permissions?: components["schemas"]["app-permissions"]; - }; - }; - }; - }; - /** - * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/suspend-installation": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Removes a GitHub App installation suspension. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
- */ - "apps/unsuspend-installation": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * You can use this API to list the set of OAuth applications that have been granted access to your account. Unlike the [list your authorizations](https://docs.github.com/rest/reference/oauth-authorizations#list-your-authorizations) API, this API does not manage individual tokens. This API will return one entry for each OAuth application that has been granted access to your account, regardless of the number of tokens an application has generated for your user. The list of OAuth applications returned matches what is shown on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). The `scopes` returned are the union of scopes authorized for the application. For example, if an application has one token with `repo` scope and another token with `user` scope, the grant will return `["repo", "user"]`. - */ - "oauth-authorizations/list-grants": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** The client ID of your GitHub app. */ - client_id?: string; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["application-grant"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). 
*/ - "oauth-authorizations/get-grant": { - parameters: { - path: { - /** grant_id parameter */ - grant_id: components["parameters"]["grant_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["application-grant"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for your user. Once deleted, the application has no access to your account and is no longer listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). - */ - "oauth-authorizations/delete-grant": { - parameters: { - path: { - /** grant_id parameter */ - grant_id: components["parameters"]["grant_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. - * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). - */ - "apps/delete-authorization": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The OAuth access token used to authenticate to the GitHub API. */ - access_token?: string; - }; - }; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). 
- * - * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid token as `:access_token` and the grant for the token's owner will be deleted. - * - * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the Applications settings page under "Authorized OAuth Apps" on GitHub](https://github.com/settings/applications#authorized). - */ - "apps/revoke-grant-for-application": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - access_token: components["parameters"]["access-token"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. */ - "apps/check-token": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The access_token of the OAuth application. */ - access_token: string; - }; - }; - }; - }; - /** OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. */ - "apps/delete-token": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The OAuth access token used to authenticate to the GitHub API. */ - access_token: string; - }; - }; - }; - }; - /** OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ - "apps/reset-token": { - parameters: { - path: { - /** The client ID of your GitHub app. 
*/ - client_id: components["parameters"]["client-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The access_token of the OAuth application. */ - access_token: string; - }; - }; - }; - }; - /** Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ - "apps/scope-token": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The OAuth access token used to authenticate to the GitHub API. */ - access_token: string; - /** The name of the user or organization to scope the user-to-server access token to. **Required** unless `target_id` is specified. */ - target?: string; - /** The ID of the user or organization to scope the user-to-server access token to. **Required** unless `target` is specified. */ - target_id?: number; - /** The list of repository names to scope the user-to-server access token to. `repositories` may not be specified if `repository_ids` is specified. */ - repositories?: string[]; - /** The list of repository IDs to scope the user-to-server access token to. `repository_ids` may not be specified if `repositories` is specified. */ - repository_ids?: number[]; - permissions?: components["schemas"]["app-permissions"]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). - * - * OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. - */ - "apps/check-authorization": { - parameters: { - path: { - /** The client ID of your GitHub app. 
*/ - client_id: components["parameters"]["client-id"]; - access_token: components["parameters"]["access-token"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["authorization"] | null; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). - * - * OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. - */ - "apps/reset-authorization": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - access_token: components["parameters"]["access-token"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue OAuth endpoints that contain `access_token` in the path parameter. We have introduced new endpoints that allow you to securely manage tokens for OAuth Apps by moving `access_token` to the request body. For more information, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-app-endpoint/). - * - * OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. - */ - "apps/revoke-authorization-for-application": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - access_token: components["parameters"]["access-token"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). - * - * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. 
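The token-management operations above (`apps/check-token`, `apps/reset-token`, `apps/delete-token`, `apps/scope-token`) all expect Basic authentication with the OAuth app's `client_id` and `client_secret` and take the user token in the request body. Below is a minimal sketch of how a consumer of these generated types might call two of them with `@octokit/core`; the concrete routes (`POST` / `PATCH /applications/{client_id}/token`) come from GitHub's public REST documentation rather than from this diff, and all credential values are placeholders.

```
import { Octokit } from "@octokit/core";

// Placeholder credentials from the OAuth app's settings page (not real values).
const clientId = "Iv1.0000000000000000";
const clientSecret = "0000000000000000000000000000000000000000";
const userToken = "gho_userTokenToInspect";

// Basic auth: username = client_id, password = client_secret.
const basicAuth = Buffer.from(`${clientId}:${clientSecret}`).toString("base64");
const octokit = new Octokit();

async function main() {
  // apps/check-token: validate the token without counting against failed-login rate limits.
  const checked = await octokit.request("POST /applications/{client_id}/token", {
    client_id: clientId,
    access_token: userToken,
    headers: { authorization: `basic ${basicAuth}` },
  });
  console.log("token scopes:", checked.data.scopes);

  // apps/reset-token: rotate the token; the response body carries the new value,
  // which must be saved because the change takes effect immediately.
  const reset = await octokit.request("PATCH /applications/{client_id}/token", {
    client_id: clientId,
    access_token: userToken,
    headers: { authorization: `basic ${basicAuth}` },
  });
  console.log("new token:", reset.data.token);
}

main().catch(console.error);
```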
- */ - "apps/get-by-slug": { - parameters: { - path: { - app_slug: components["parameters"]["app_slug"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["integration"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ - "oauth-authorizations/list-authorizations": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** The client ID of your GitHub app. */ - client_id?: string; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["authorization"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). - * - * Creates OAuth tokens using [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication). If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." 
- * - * To create tokens for a particular OAuth application using this endpoint, you must authenticate as the user you want to create an authorization for and provide the app's client ID and secret, found on your OAuth application's settings page. If your OAuth application intends to create multiple tokens for one user, use `fingerprint` to differentiate between them. - * - * You can also create tokens on GitHub from the [personal access tokens settings](https://github.com/settings/tokens) page. Read more about these tokens in [the GitHub Help documentation](https://help.github.com/articles/creating-an-access-token-for-command-line-use). - * - * Organizations that enforce SAML SSO require personal access tokens to be allowed. Read more about allowing tokens in [the GitHub Help documentation](https://help.github.com/articles/about-identity-and-access-management-with-saml-single-sign-on). - */ - "oauth-authorizations/create-authorization": { - parameters: {}; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** A list of scopes that this authorization is in. */ - scopes?: string[] | null; - /** A note to remind you what the OAuth token is for. */ - note?: string; - /** A URL to remind you what app the OAuth token is for. */ - note_url?: string; - /** The OAuth app client key for which to create the token. */ - client_id?: string; - /** The OAuth app client secret for which to create the token. */ - client_secret?: string; - /** A unique string to distinguish an authorization from others created for the same client ID and user. */ - fingerprint?: string; - }; - }; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). - * - * Creates a new authorization for the specified OAuth application, only if an authorization for that application doesn't already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. 
- * - * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." - * - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - */ - "oauth-authorizations/get-or-create-authorization-for-app": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - }; - }; - responses: { - /** if returning an existing token */ - 200: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The OAuth app client secret for which to create the token. */ - client_secret: string; - /** A list of scopes that this authorization is in. */ - scopes?: string[] | null; - /** A note to remind you what the OAuth token is for. */ - note?: string; - /** A URL to remind you what app the OAuth token is for. */ - note_url?: string; - /** A unique string to distinguish an authorization from others created for the same client ID and user. */ - fingerprint?: string; - }; - }; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. 
For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * **Warning:** Apps must use the [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) to obtain OAuth tokens that work with GitHub SAML organizations. OAuth tokens created using the Authorizations API will be unable to access GitHub SAML organizations. For more information, see the [blog post](https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api). - * - * This method will create a new authorization for the specified OAuth application, only if an authorization for that application and fingerprint do not already exist for the user. The URL includes the 20 character client ID for the OAuth app that is requesting the token. `fingerprint` is a unique string to distinguish an authorization from others created for the same client ID and user. It returns the user's existing authorization for the application if one is present. Otherwise, it creates and returns a new one. - * - * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." - */ - "oauth-authorizations/get-or-create-authorization-for-app-and-fingerprint": { - parameters: { - path: { - /** The client ID of your GitHub app. */ - client_id: components["parameters"]["client-id"]; - fingerprint: string; - }; - }; - responses: { - /** if returning an existing token */ - 200: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - /** Response if returning a new token */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The OAuth app client secret for which to create the token. */ - client_secret: string; - /** A list of scopes that this authorization is in. */ - scopes?: string[] | null; - /** A note to remind you what the OAuth token is for. */ - note?: string; - /** A URL to remind you what app the OAuth token is for. */ - note_url?: string; - }; - }; - }; - }; - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). 
*/ - "oauth-authorizations/get-authorization": { - parameters: { - path: { - /** authorization_id parameter */ - authorization_id: components["parameters"]["authorization_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). */ - "oauth-authorizations/delete-authorization": { - parameters: { - path: { - /** authorization_id parameter */ - authorization_id: components["parameters"]["authorization_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * **Deprecation Notice:** GitHub will discontinue the [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations/), which is used by integrations to create personal access tokens and OAuth tokens, and you must now create these tokens using our [web application flow](https://docs.github.com/developers/apps/authorizing-oauth-apps#web-application-flow). The [OAuth Authorizations API](https://docs.github.com/rest/reference/oauth-authorizations) will be removed on November, 13, 2020. For more information, including scheduled brownouts, see the [blog post](https://developer.github.com/changes/2020-02-14-deprecating-oauth-auth-endpoint/). - * - * If you have two-factor authentication setup, Basic Authentication for this endpoint requires that you use a one-time password (OTP) and your username and password instead of tokens. For more information, see "[Working with two-factor authentication](https://docs.github.com/rest/overview/other-authentication-methods#working-with-two-factor-authentication)." - * - * You can only send one of these scope keys at a time. - */ - "oauth-authorizations/update-authorization": { - parameters: { - path: { - /** authorization_id parameter */ - authorization_id: components["parameters"]["authorization_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["authorization"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** A list of scopes that this authorization is in. */ - scopes?: string[] | null; - /** A list of scopes to add to this authorization. */ - add_scopes?: string[]; - /** A list of scopes to remove from this authorization. */ - remove_scopes?: string[]; - /** A note to remind you what the OAuth token is for. */ - note?: string; - /** A URL to remind you what app the OAuth token is for. 
*/ - note_url?: string; - /** A unique string to distinguish an authorization from others created for the same client ID and user. */ - fingerprint?: string; - }; - }; - }; - }; - "codes-of-conduct/get-all-codes-of-conduct": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-of-conduct"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - "codes-of-conduct/get-conduct-code": { - parameters: { - path: { - key: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-of-conduct"]; - }; - }; - 304: components["responses"]["not_modified"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * Creates an attachment under a content reference URL in the body or comment of an issue or pull request. Use the `id` of the content reference from the [`content_reference` event](https://docs.github.com/webhooks/event-payloads/#content_reference) to create an attachment. - * - * The app must create a content attachment within six hours of the content reference URL being posted. See "[Using content attachments](https://docs.github.com/apps/using-content-attachments/)" for details about content attachments. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - "apps/create-content-attachment": { - parameters: { - path: { - content_reference_id: number; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["content-reference-attachment"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The title of the attachment */ - title: string; - /** The body of the attachment */ - body: string; - }; - }; - }; - }; - /** Lists all the emojis available to use on GitHub. */ - "emojis/get": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - [key: string]: string; - }; - }; - }; - 304: components["responses"]["not_modified"]; - }; - }; - /** - * Gets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/get-github-actions-permissions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-enterprise-permissions"]; - }; - }; - }; - }; - /** - * Sets the GitHub Actions permissions policy for organizations and allowed actions in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. 
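For `enterprise-admin/get-github-actions-permissions-enterprise` above and the `set` counterpart that follows, a short sketch of typical usage is given here, assuming `@octokit/core` and an `admin:enterprise` token. The routes under `/enterprises/{enterprise}/actions/permissions` are taken from GitHub's public REST documentation and are not shown in this generated file; the token and enterprise slug are placeholders (the slug reuses `octo-enterprise` from the registration-token example further down).

```
import { Octokit } from "@octokit/core";

// "ENTERPRISE_ADMIN_TOKEN" stands in for a personal access token with the admin:enterprise scope.
const octokit = new Octokit({ auth: "ENTERPRISE_ADMIN_TOKEN" });

async function main() {
  // enterprise-admin/get-github-actions-permissions-enterprise
  const current = await octokit.request("GET /enterprises/{enterprise}/actions/permissions", {
    enterprise: "octo-enterprise",
  });
  console.log("current policy:", current.data);

  // enterprise-admin/set-github-actions-permissions-enterprise:
  // restrict Actions to selected organizations and to a selected set of actions,
  // using the enabled_organizations / allowed_actions body fields shown above.
  await octokit.request("PUT /enterprises/{enterprise}/actions/permissions", {
    enterprise: "octo-enterprise",
    enabled_organizations: "selected",
    allowed_actions: "selected",
  });
}

main().catch(console.error);
```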
- */ - "enterprise-admin/set-github-actions-permissions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - enabled_organizations: components["schemas"]["enabled-organizations"]; - allowed_actions?: components["schemas"]["allowed-actions"]; - }; - }; - }; - }; - /** - * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - organizations: components["schemas"]["organization-simple"][]; - }; - }; - }; - }; - }; - /** - * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** List of organization IDs to enable for GitHub Actions. */ - selected_organization_ids: number[]; - }; - }; - }; - }; - /** - * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/enable-selected-organization-github-actions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of an organization. 
*/ - org_id: components["parameters"]["org_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/disable-selected-organization-github-actions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of an organization. */ - org_id: components["parameters"]["org_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Gets the selected actions that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/get-allowed-actions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["selected-actions"]; - }; - }; - }; - }; - /** - * Sets the actions that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/set-allowed-actions-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": components["schemas"]["selected-actions"]; - }; - }; - }; - /** - * Lists all self-hosted runner groups for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/list-self-hosted-runner-groups-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
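The selected-organization operations above (list, enable, disable) only come into play once the enterprise policy is set to `selected`. A sketch of toggling one organization follows, under the same assumptions as before: `@octokit/core`, an `admin:enterprise` token placeholder, routes taken from GitHub's public REST documentation rather than this diff, and a made-up `org_id`.

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: "ENTERPRISE_ADMIN_TOKEN" }); // admin:enterprise scope

async function main() {
  const enterprise = "octo-enterprise";

  // enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise
  const { data } = await octokit.request(
    "GET /enterprises/{enterprise}/actions/permissions/organizations",
    { enterprise, per_page: 100 }
  );
  console.log(`${data.total_count} organizations currently enabled`);

  // enterprise-admin/enable-selected-organization-github-actions-enterprise
  await octokit.request(
    "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}",
    { enterprise, org_id: 123456 } // org_id: unique identifier of the organization (placeholder)
  );

  // enterprise-admin/disable-selected-organization-github-actions-enterprise
  await octokit.request(
    "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}",
    { enterprise, org_id: 123456 }
  );
}

main().catch(console.error);
```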
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - runner_groups: components["schemas"]["runner-groups-enterprise"][]; - }; - }; - }; - }; - }; - /** - * Creates a new self-hosted runner group for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/create-self-hosted-runner-group-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["runner-groups-enterprise"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Name of the runner group. */ - name: string; - /** Visibility of a runner group. You can select all organizations or select individual organization. Can be one of: `all` or `selected` */ - visibility?: "selected" | "all"; - /** List of organization IDs that can access the runner group. */ - selected_organization_ids?: number[]; - /** List of runner IDs to add to the runner group. */ - runners?: number[]; - }; - }; - }; - }; - /** - * Gets a specific self-hosted runner group for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/get-self-hosted-runner-group-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner-groups-enterprise"]; - }; - }; - }; - }; - /** - * Deletes a self-hosted runner group for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/delete-self-hosted-runner-group-from-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Updates the `name` and `visibility` of a self-hosted runner group in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/update-self-hosted-runner-group-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner-groups-enterprise"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Name of the runner group. 
*/ - name?: string; - /** Visibility of a runner group. You can select all organizations or select individual organizations. Can be one of: `all` or `selected` */ - visibility?: "selected" | "all"; - }; - }; - }; - }; - /** - * Lists the organizations with access to a self-hosted runner group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - organizations: components["schemas"]["organization-simple"][]; - }; - }; - }; - }; - }; - /** - * Replaces the list of organizations that have access to a self-hosted runner configured in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** List of organization IDs that can access the runner group. */ - selected_organization_ids: number[]; - }; - }; - }; - }; - /** - * Adds an organization to the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - /** Unique identifier of an organization. */ - org_id: components["parameters"]["org_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Removes an organization from the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. 
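The runner-group operations above combine naturally: create a group with `visibility: "selected"`, then grant additional organizations access to it. A minimal sketch under the same assumptions (placeholder token, IDs, and enterprise slug; routes from GitHub's public REST documentation, not from this diff):

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: "ENTERPRISE_ADMIN_TOKEN" }); // admin:enterprise scope

async function main() {
  const enterprise = "octo-enterprise";

  // enterprise-admin/create-self-hosted-runner-group-for-enterprise:
  // a group visible only to the listed organizations.
  const { data: group } = await octokit.request(
    "POST /enterprises/{enterprise}/actions/runner-groups",
    {
      enterprise,
      name: "deploy-runners",
      visibility: "selected",
      selected_organization_ids: [123456], // placeholder organization ID
    }
  );

  // enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise:
  // later, grant one more organization access to the same group.
  await octokit.request(
    "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}",
    { enterprise, runner_group_id: group.id, org_id: 654321 }
  );
}

main().catch(console.error);
```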
- */ - "enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - /** Unique identifier of an organization. */ - org_id: components["parameters"]["org_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Lists the self-hosted runners that are in a specific enterprise group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/list-self-hosted-runners-in-group-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - runners: components["schemas"]["runner"][]; - }; - }; - }; - }; - }; - /** - * Replaces the list of self-hosted runners that are part of an enterprise runner group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/set-self-hosted-runners-in-group-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** List of runner IDs to add to the runner group. */ - runners: number[]; - }; - }; - }; - }; - /** - * Adds a self-hosted runner to a runner group configured in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` - * scope to use this endpoint. - */ - "enterprise-admin/add-self-hosted-runner-to-group-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Removes a self-hosted runner from a group configured in an enterprise. The runner is then returned to the default group. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. 
*/ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Lists all self-hosted runners configured for an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/list-self-hosted-runners-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count?: number; - runners?: components["schemas"]["runner"][]; - }; - }; - }; - }; - }; - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/list-runner-applications-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner-application"][]; - }; - }; - }; - }; - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/enterprises/octo-enterprise --token TOKEN - * ``` - */ - "enterprise-admin/create-registration-token-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["authentication-token"]; - }; - }; - }; - }; - /** - * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an enterprise. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - * - * #### Example using remove token - * - * To remove your self-hosted runner from an enterprise, replace `TOKEN` with the remove token provided by this - * endpoint. - * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - "enterprise-admin/create-remove-token-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. 
*/ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["authentication-token"]; - }; - }; - }; - }; - /** - * Gets a specific self-hosted runner configured in an enterprise. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/get-self-hosted-runner-for-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner"]; - }; - }; - }; - }; - /** - * Forces the removal of a self-hosted runner from an enterprise. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. - * - * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. - */ - "enterprise-admin/delete-self-hosted-runner-from-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Gets the audit log for an enterprise. To use this endpoint, you must be an enterprise admin, and you must use an access token with the `admin:enterprise` scope. */ - "audit-log/get-audit-log": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - query: { - /** A search phrase. For more information, see [Searching the audit log](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#searching-the-audit-log). */ - phrase?: components["parameters"]["audit-log-phrase"]; - /** - * The event types to include: - * - * - `web` - returns web (non-Git) events - * - `git` - returns Git events - * - `all` - returns both web and Git events - * - * The default is `web`. - */ - include?: components["parameters"]["audit-log-include"]; - /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ - after?: components["parameters"]["audit-log-after"]; - /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ - before?: components["parameters"]["audit-log-before"]; - /** - * The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`. - * - * The default is `desc`. - */ - order?: components["parameters"]["audit-log-order"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). 
*/ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["audit-log-event"][]; - }; - }; - }; - }; - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * The authenticated user must be an enterprise admin. - */ - "billing/get-github-actions-billing-ghe": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-billing-usage"]; - }; - }; - }; - }; - /** - * Gets the free and paid storage used for GitHub Packages in gigabytes. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * The authenticated user must be an enterprise admin. - */ - "billing/get-github-packages-billing-ghe": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["packages-billing-usage"]; - }; - }; - }; - }; - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and Github Packages. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * The authenticated user must be an enterprise admin. - */ - "billing/get-shared-storage-billing-ghe": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["combined-billing-usage"]; - }; - }; - }; - }; - /** We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. */ - "activity/list-public-events": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
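The enterprise audit log endpoint above takes a search `phrase`, an event-type `include` filter, and a sort `order`. The sketch below is a rough illustration only: the `GET /enterprises/{enterprise}/audit-log` route and the example search phrase are assumptions drawn from GitHub's public documentation, not from this diff, and the token and slug are placeholders.

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: "ENTERPRISE_ADMIN_TOKEN" }); // admin:enterprise scope

async function main() {
  // audit-log/get-audit-log: search web (non-Git) events, newest first.
  const { data: events } = await octokit.request("GET /enterprises/{enterprise}/audit-log", {
    enterprise: "octo-enterprise",
    phrase: "action:org.update_member", // example phrase; see "Searching the audit log" above
    include: "web",
    order: "desc",
    per_page: 100,
  });

  for (const event of events) {
    console.log(event["@timestamp"], event.action);
  }
}

main().catch(console.error);
```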
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: - * - * * **Timeline**: The GitHub global public timeline - * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) - * * **Current user public**: The public timeline for the authenticated user - * * **Current user**: The private timeline for the authenticated user - * * **Current user actor**: The private timeline for activity created by the authenticated user - * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. - * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. - * - * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. - */ - "activity/get-feeds": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["feed"]; - }; - }; - }; - }; - /** Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: */ - "gists/list": { - parameters: { - query: { - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["base-gist"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * Allows you to add a new gist with one or more files. - * - * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. - */ - "gists/create": { - parameters: {}; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["gist-simple"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Description of the gist */ - description?: string; - /** Names and content for the files that make up the gist */ - files: { - [key: string]: { - /** Content of the file */ - content: string; - }; - }; - public?: boolean | ("true" | "false"); - }; - }; - }; - }; - /** - * List public gists sorted by most recently updated to least recently updated. 
- * - * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. - */ - "gists/list-public": { - parameters: { - query: { - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["base-gist"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** List the authenticated user's starred gists: */ - "gists/list-starred": { - parameters: { - query: { - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["base-gist"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - "gists/get": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["gist-simple"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden_gist"]; - 404: components["responses"]["not_found"]; - }; - }; - "gists/delete": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. 
*/ - "gists/update": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["gist-simple"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": ((Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }>) & { - /** Description of the gist */ - description?: string; - /** Names of files to be updated */ - files?: { - [key: string]: Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }>; - }; - }) | null; - }; - }; - }; - "gists/list-comments": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["gist-comment"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "gists/create-comment": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["gist-comment"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - requestBody: { - content: { - "application/json": { - /** The comment text. */ - body: string; - }; - }; - }; - }; - "gists/get-comment": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["gist-comment"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden_gist"]; - 404: components["responses"]["not_found"]; - }; - }; - "gists/delete-comment": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "gists/update-comment": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["gist-comment"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - requestBody: { - content: { - "application/json": { - /** The comment text. 
*/ - body: string; - }; - }; - }; - }; - "gists/list-commits": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: { - Link?: string; - }; - content: { - "application/json": components["schemas"]["gist-commit"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "gists/list-forks": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["gist-simple"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** **Note**: This was previously `/gists/:gist_id/fork`. */ - "gists/fork": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["base-gist"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "gists/check-is-starred": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response if gist is starred */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - /** Not Found if gist is not starred */ - 404: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - }; - }; - /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." 
*/ - "gists/star": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "gists/unstar": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "gists/get-revision": { - parameters: { - path: { - /** gist_id parameter */ - gist_id: components["parameters"]["gist_id"]; - sha: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["gist-simple"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). */ - "gitignore/get-all-templates": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": string[]; - }; - }; - 304: components["responses"]["not_modified"]; - }; - }; - /** - * The API also allows fetching the source of a single template. - * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. - */ - "gitignore/get-template": { - parameters: { - path: { - name: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["gitignore-template"]; - }; - }; - 304: components["responses"]["not_modified"]; - }; - }; - /** - * List repositories that an app installation can access. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. - */ - "apps/list-repos-accessible-to-installation": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - repositories: components["schemas"]["repository"][]; - repository_selection?: string; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * Revokes the installation token you're using to authenticate as an installation and access this endpoint. - * - * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. - * - * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. 
- */ - "apps/revoke-installation-access-token": { - parameters: {}; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * List issues assigned to the authenticated user across all visible repositories including owned repositories, member - * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not - * necessarily assigned to you. - * - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - "issues/list": { - parameters: { - query: { - /** - * Indicates which sorts of issues to return. Can be one of: - * \* `assigned`: Issues assigned to you - * \* `created`: Issues created by you - * \* `mentioned`: Issues mentioning you - * \* `subscribed`: Issues you're subscribed to updates for - * \* `all`: All issues the authenticated user can see, regardless of participation or creation - */ - filter?: "assigned" | "created" | "mentioned" | "subscribed" | "repos" | "all"; - /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ - state?: "open" | "closed" | "all"; - /** A list of comma separated label names. Example: `bug,ui,@high` */ - labels?: components["parameters"]["labels"]; - /** What to sort results by. Can be either `created`, `updated`, `comments`. */ - sort?: "created" | "updated" | "comments"; - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - collab?: boolean; - orgs?: boolean; - owned?: boolean; - pulls?: boolean; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "licenses/get-all-commonly-used": { - parameters: { - query: { - featured?: boolean; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["license-simple"][]; - }; - }; - 304: components["responses"]["not_modified"]; - }; - }; - "licenses/get": { - parameters: { - path: { - license: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["license"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "markdown/render": { - parameters: {}; - responses: { - /** Response */ - 200: { - headers: { - "Content-Length"?: string; - }; - content: { - "text/html": string; - }; - }; - 304: components["responses"]["not_modified"]; - }; - requestBody: { - content: { - "application/json": { - /** The Markdown text to render in HTML. */ - text: string; - /** The rendering mode. */ - mode?: "markdown" | "gfm"; - /** The repository context to use when creating references in `gfm` mode. */ - context?: string; - }; - }; - }; - }; - /** You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. */ - "markdown/render-raw": { - parameters: {}; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "text/html": string; - }; - }; - 304: components["responses"]["not_modified"]; - }; - requestBody: { - content: { - "text/plain": string; - "text/x-markdown": string; - }; - }; - }; - /** - * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - "apps/get-subscription-plan-for-account": { - parameters: { - path: { - /** account_id parameter */ - account_id: components["parameters"]["account_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["marketplace-purchase"]; - }; - }; - 401: components["responses"]["requires_authentication"]; - /** Not Found when the account has not purchased the listing */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - }; - /** - * Lists all plans that are part of your GitHub Marketplace listing. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - "apps/list-plans": { - parameters: { - query: { - /** Results per page (max 100). 
*/ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["marketplace-listing-plan"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - "apps/list-accounts-for-plan": { - parameters: { - path: { - /** plan_id parameter */ - plan_id: components["parameters"]["plan_id"]; - }; - query: { - /** One of `created` (when the repository was starred) or `updated` (when it was last pushed to). */ - sort?: components["parameters"]["sort"]; - /** To return the oldest accounts first, set to `asc`. Can be one of `asc` or `desc`. Ignored without the `sort` parameter. */ - direction?: "asc" | "desc"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["marketplace-purchase"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - "apps/get-subscription-plan-for-account-stubbed": { - parameters: { - path: { - /** account_id parameter */ - account_id: components["parameters"]["account_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["marketplace-purchase"]; - }; - }; - 401: components["responses"]["requires_authentication"]; - /** Not Found when the account has not purchased the listing */ - 404: unknown; - }; - }; - /** - * Lists all plans that are part of your GitHub Marketplace listing. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - "apps/list-plans-stubbed": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["marketplace-listing-plan"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - }; - }; - /** - * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. - * - * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. - */ - "apps/list-accounts-for-plan-stubbed": { - parameters: { - path: { - /** plan_id parameter */ - plan_id: components["parameters"]["plan_id"]; - }; - query: { - /** One of `created` (when the repository was starred) or `updated` (when it was last pushed to). */ - sort?: components["parameters"]["sort"]; - /** To return the oldest accounts first, set to `asc`. Can be one of `asc` or `desc`. Ignored without the `sort` parameter. */ - direction?: "asc" | "desc"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["marketplace-purchase"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - }; - }; - /** - * Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://help.github.com/articles/about-github-s-ip-addresses/)." - * - * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. - */ - "meta/get": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["api-overview"]; - }; - }; - 304: components["responses"]["not_modified"]; - }; - }; - "activity/list-public-events-for-repo-network": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - 301: components["responses"]["moved_permanently"]; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** List all notifications for the current user, sorted by most recently updated. */ - "activity/list-notifications-for-authenticated-user": { - parameters: { - query: { - /** If `true`, show notifications marked as read. */ - all?: components["parameters"]["all"]; - /** If `true`, only shows notifications in which the user is directly participating or mentioned. */ - participating?: components["parameters"]["participating"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - before?: components["parameters"]["before"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["thread"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Marks all notifications as "read" removes it from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ - "activity/mark-notifications-as-read": { - parameters: {}; - responses: { - /** Response */ - 202: { - content: { - "application/json": { - message?: string; - }; - }; - }; - /** Reset Content */ - 205: unknown; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - requestBody: { - content: { - "application/json": { - /** Describes the last point that notifications were checked. */ - last_read_at?: string; - /** Whether the notification has been read. 
*/ - read?: boolean; - }; - }; - }; - }; - "activity/get-thread": { - parameters: { - path: { - /** thread_id parameter */ - thread_id: components["parameters"]["thread_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["thread"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - "activity/mark-thread-as-read": { - parameters: { - path: { - /** thread_id parameter */ - thread_id: components["parameters"]["thread_id"]; - }; - }; - responses: { - /** Reset Content */ - 205: unknown; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). - * - * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. - */ - "activity/get-thread-subscription-for-authenticated-user": { - parameters: { - path: { - /** thread_id parameter */ - thread_id: components["parameters"]["thread_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["thread-subscription"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. - * - * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribed to threads that you have previously ignored. - * - * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. - */ - "activity/set-thread-subscription": { - parameters: { - path: { - /** thread_id parameter */ - thread_id: components["parameters"]["thread_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["thread-subscription"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - requestBody: { - content: { - "application/json": { - /** Whether to block all notifications from a thread. */ - ignored?: boolean; - }; - }; - }; - }; - /** Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. 
*/ - "activity/delete-thread-subscription": { - parameters: { - path: { - /** thread_id parameter */ - thread_id: components["parameters"]["thread_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** Get the octocat as ASCII art */ - "meta/get-octocat": { - parameters: { - query: { - /** The words to show in Octocat's speech bubble */ - s?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/octocat-stream": string; - }; - }; - }; - }; - /** - * Lists all organizations, in the order that they were created on GitHub. - * - * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. - */ - "orgs/list": { - parameters: { - query: { - /** An organization ID. Only return organizations with an ID greater than this ID. */ - since?: components["parameters"]["since-org"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: { - Link?: string; - }; - content: { - "application/json": components["schemas"]["organization-simple"][]; - }; - }; - 304: components["responses"]["not_modified"]; - }; - }; - /** - * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). - * - * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." - */ - "orgs/get": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["organization-full"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). - * - * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. 
- */ - "orgs/update": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["organization-full"]; - }; - }; - 409: components["responses"]["conflict"]; - 415: components["responses"]["preview_header_missing"]; - /** Validation failed */ - 422: { - content: { - "application/json": components["schemas"]["validation-error"] | components["schemas"]["validation-error-simple"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Billing email address. This address is not publicized. */ - billing_email?: string; - /** The company name. */ - company?: string; - /** The publicly visible email address. */ - email?: string; - /** The Twitter username of the company. */ - twitter_username?: string; - /** The location. */ - location?: string; - /** The shorthand name of the company. */ - name?: string; - /** The description of the company. */ - description?: string; - /** Toggles whether an organization can use organization projects. */ - has_organization_projects?: boolean; - /** Toggles whether repositories that belong to the organization can use repository projects. */ - has_repository_projects?: boolean; - /** - * Default permission level members have for organization repositories: - * \* `read` - can pull, but not push to or administer this repository. - * \* `write` - can pull and push, but not administer this repository. - * \* `admin` - can pull, push, and administer this repository. - * \* `none` - no permissions granted by default. - */ - default_repository_permission?: "read" | "write" | "admin" | "none"; - /** - * Toggles the ability of non-admin organization members to create repositories. Can be one of: - * \* `true` - all organization members can create repositories. - * \* `false` - only organization owners can create repositories. - * Default: `true` - * **Note:** A parameter can override this parameter. See `members_allowed_repository_creation_type` in this table for details. **Note:** A parameter can override this parameter. See `members_allowed_repository_creation_type` in this table for details. - */ - members_can_create_repositories?: boolean; - /** - * Toggles whether organization members can create internal repositories, which are visible to all enterprise members. You can only allow members to create internal repositories if your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. Can be one of: - * \* `true` - all organization members can create internal repositories. - * \* `false` - only organization owners can create internal repositories. - * Default: `true`. For more information, see "[Restricting repository creation in your organization](https://help.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. - */ - members_can_create_internal_repositories?: boolean; - /** - * Toggles whether organization members can create private repositories, which are visible to organization members with permission. Can be one of: - * \* `true` - all organization members can create private repositories. - * \* `false` - only organization owners can create private repositories. - * Default: `true`. 
For more information, see "[Restricting repository creation in your organization](https://help.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. - */ - members_can_create_private_repositories?: boolean; - /** - * Toggles whether organization members can create public repositories, which are visible to anyone. Can be one of: - * \* `true` - all organization members can create public repositories. - * \* `false` - only organization owners can create public repositories. - * Default: `true`. For more information, see "[Restricting repository creation in your organization](https://help.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. - */ - members_can_create_public_repositories?: boolean; - /** - * Specifies which types of repositories non-admin organization members can create. Can be one of: - * \* `all` - all organization members can create public and private repositories. - * \* `private` - members can create private repositories. This option is only available to repositories that are part of an organization on GitHub Enterprise Cloud. - * \* `none` - only admin members can create repositories. - * **Note:** This parameter is deprecated and will be removed in the future. Its return value ignores internal repositories. Using this parameter overrides values set in `members_can_create_repositories`. See the parameter deprecation notice in the operation description for details. - */ - members_allowed_repository_creation_type?: "all" | "private" | "none"; - /** - * Toggles whether organization members can create GitHub Pages sites. Can be one of: - * \* `true` - all organization members can create GitHub Pages sites. - * \* `false` - no organization members can create GitHub Pages sites. Existing published sites will not be impacted. - */ - members_can_create_pages?: boolean; - /** - * Toggles whether organization members can create public GitHub Pages sites. Can be one of: - * \* `true` - all organization members can create public GitHub Pages sites. - * \* `false` - no organization members can create public GitHub Pages sites. Existing published sites will not be impacted. - */ - members_can_create_public_pages?: boolean; - /** - * Toggles whether organization members can create private GitHub Pages sites. Can be one of: - * \* `true` - all organization members can create private GitHub Pages sites. - * \* `false` - no organization members can create private GitHub Pages sites. Existing published sites will not be impacted. - */ - members_can_create_private_pages?: boolean; - blog?: string; - }; - }; - }; - }; - /** - * Gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - "actions/get-github-actions-permissions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-organization-permissions"]; - }; - }; - }; - }; - /** - * Sets the GitHub Actions permissions policy for repositories and allowed actions in an organization. 
- * - * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions, then you cannot override them for the organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - "actions/set-github-actions-permissions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - enabled_repositories: components["schemas"]["enabled-repositories"]; - allowed_actions?: components["schemas"]["allowed-actions"]; - }; - }; - }; - }; - /** - * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - "actions/list-selected-repositories-enabled-github-actions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - repositories: components["schemas"]["repository"][]; - }; - }; - }; - }; - }; - /** - * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - "actions/set-selected-repositories-enabled-github-actions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** List of repository IDs to enable for GitHub Actions. */ - selected_repository_ids: number[]; - }; - }; - }; - }; - /** - * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
- */ - "actions/enable-selected-repository-github-actions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - repository_id: components["parameters"]["repository_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - "actions/disable-selected-repository-github-actions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - repository_id: components["parameters"]["repository_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Gets the selected actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."" - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - "actions/get-allowed-actions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["selected-actions"]; - }; - }; - }; - }; - /** - * Sets the actions that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." - * - * If the organization belongs to an enterprise that has `selected` actions set at the enterprise level, then you cannot override any of the enterprise's allowed actions settings. - * - * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. - */ - "actions/set-allowed-actions-organization": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": components["schemas"]["selected-actions"]; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Lists all self-hosted runner groups configured in an organization and inherited from an enterprise. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
- */ - "actions/list-self-hosted-runner-groups-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - runner_groups: components["schemas"]["runner-groups-org"][]; - }; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Creates a new self-hosted runner group for an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/create-self-hosted-runner-group-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["runner-groups-org"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Name of the runner group. */ - name: string; - /** Visibility of a runner group. You can select all repositories, select individual repositories, or limit access to private repositories. Can be one of: `all`, `selected`, or `private`. */ - visibility?: "selected" | "all" | "private"; - /** List of repository IDs that can access the runner group. */ - selected_repository_ids?: number[]; - /** List of runner IDs to add to the runner group. */ - runners?: number[]; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Gets a specific self-hosted runner group for an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/get-self-hosted-runner-group-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner-groups-org"]; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Deletes a self-hosted runner group for an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/delete-self-hosted-runner-group-from-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Updates the `name` and `visibility` of a self-hosted runner group in an organization. 
- * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/update-self-hosted-runner-group-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner-groups-org"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Name of the runner group. */ - name?: string; - /** Visibility of a runner group. You can select all repositories, select individual repositories, or all private repositories. Can be one of: `all`, `selected`, or `private`. */ - visibility?: "selected" | "all" | "private"; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Lists the repositories with access to a self-hosted runner group configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/list-repo-access-to-self-hosted-runner-group-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - query: { - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - repositories: components["schemas"]["repository"][]; - }; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Replaces the list of repositories that have access to a self-hosted runner group configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/set-repo-access-to-self-hosted-runner-group-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** List of repository IDs that can access the runner group. */ - selected_repository_ids: number[]; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * - * Adds a repository to the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` - * scope to use this endpoint. 
- */ - "actions/add-repo-access-to-self-hosted-runner-group-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - repository_id: components["parameters"]["repository_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * - * Removes a repository from the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/remove-repo-access-to-self-hosted-runner-group-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - repository_id: components["parameters"]["repository_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Lists self-hosted runners that are in a specific organization group. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/list-self-hosted-runners-in-group-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - runners: components["schemas"]["runner"][]; - }; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * Replaces the list of self-hosted runners that are part of an organization runner group. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/set-self-hosted-runners-in-group-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** List of runner IDs to add to the runner group. */ - runners: number[]; - }; - }; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." 
- * - * - * Adds a self-hosted runner to a runner group configured in an organization. - * - * You must authenticate using an access token with the `admin:org` - * scope to use this endpoint. - */ - "actions/add-self-hosted-runner-to-group-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." - * - * - * Removes a self-hosted runner from a group configured in an organization. The runner is then returned to the default group. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/remove-self-hosted-runner-from-group-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner group. */ - runner_group_id: components["parameters"]["runner_group_id"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Lists all self-hosted runners configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/list-self-hosted-runners-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - runners: components["schemas"]["runner"][]; - }; - }; - }; - }; - }; - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/list-runner-applications-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner-application"][]; - }; - }; - }; - }; - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/octo-org --token TOKEN - * ``` - */ - "actions/create-registration-token-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["authentication-token"]; - }; - }; - }; - }; - /** - * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. 
- * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - * - * #### Example using remove token - * - * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this - * endpoint. - * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - "actions/create-remove-token-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["authentication-token"]; - }; - }; - }; - }; - /** - * Gets a specific self-hosted runner configured in an organization. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/get-self-hosted-runner-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner"]; - }; - }; - }; - }; - /** - * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. - * - * You must authenticate using an access token with the `admin:org` scope to use this endpoint. - */ - "actions/delete-self-hosted-runner-from-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - "actions/list-org-secrets": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - secrets: components["schemas"]["organization-actions-secret"][]; - }; - }; - }; - }; - }; - /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - "actions/get-org-public-key": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-public-key"]; - }; - }; - }; - }; - /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. 
*/ - "actions/get-org-secret": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["organization-actions-secret"]; - }; - }; - }; - }; - /** - * Creates or updates an organization secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to - * use this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. - * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. - * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. - * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
- * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - "actions/create-or-update-org-secret": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response when creating a secret */ - 201: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - /** Response when updating a secret */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/actions#get-an-organization-public-key) endpoint. */ - encrypted_value?: string; - /** ID of the key you used to encrypt the secret. */ - key_id?: string; - /** - * Configures the access that repositories have to the organization secret. Can be one of: - * \- `all` - All repositories in an organization can access the secret. - * \- `private` - Private repositories in an organization can access the secret. - * \- `selected` - Only specific repositories can access the secret. - */ - visibility: "all" | "private" | "selected"; - /** An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ - selected_repository_ids?: string[]; - }; - }; - }; - }; - /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - "actions/delete-org-secret": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - "actions/list-selected-repos-for-org-secret": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - query: { - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). 
*/ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - repositories: components["schemas"]["minimal-repository"][]; - }; - }; - }; - }; - }; - /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - "actions/set-selected-repos-for-org-secret": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ - selected_repository_ids?: number[]; - }; - }; - }; - }; - /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - "actions/add-selected-repo-to-org-secret": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - repository_id: number; - }; - }; - responses: { - /** No Content when repository was added to the selected list */ - 204: never; - /** Conflict when visibility type is not set to selected */ - 409: unknown; - }; - }; - /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ - "actions/remove-selected-repo-from-org-secret": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - repository_id: number; - }; - }; - responses: { - /** Response when repository was removed from the selected list */ - 204: never; - /** Conflict when visibility type not set to selected */ - 409: unknown; - }; - }; - /** - * Gets the audit log for an organization. 
For more information, see "[Reviewing the audit log for your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization)." - * - * To use this endpoint, you must be an organization owner, and you must use an access token with the `admin:org` scope. GitHub Apps must have the `organization_administration` read permission to use this endpoint. - */ - "orgs/get-audit-log": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** A search phrase. For more information, see [Searching the audit log](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#searching-the-audit-log). */ - phrase?: components["parameters"]["audit-log-phrase"]; - /** - * The event types to include: - * - * - `web` - returns web (non-Git) events - * - `git` - returns Git events - * - `all` - returns both web and Git events - * - * The default is `web`. - */ - include?: components["parameters"]["audit-log-include"]; - /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ - after?: components["parameters"]["audit-log-after"]; - /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ - before?: components["parameters"]["audit-log-before"]; - /** - * The order of audit log events. To list newest events first, specify `desc`. To list oldest events first, specify `asc`. - * - * The default is `desc`. - */ - order?: components["parameters"]["audit-log-order"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["audit-log-event"][]; - }; - }; - }; - }; - /** List the users blocked by an organization. */ - "orgs/list-blocked-users": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - }; - }; - "orgs/check-blocked-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** If the user is blocked: */ - 204: never; - /** If the user is not blocked: */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - }; - "orgs/block-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 422: components["responses"]["validation_failed"]; - }; - }; - "orgs/unblock-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products). 
- * - * An authenticated organization owner with the `read:org` scope can list all credential authorizations for an organization that uses SAML single sign-on (SSO). The credentials are either personal access tokens or SSH keys that organization members have authorized for the organization. For more information, see [About authentication with SAML single sign-on](https://help.github.com/en/articles/about-authentication-with-saml-single-sign-on). - */ - "orgs/list-saml-sso-authorizations": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["credential-authorization"][]; - }; - }; - }; - }; - /** - * Listing and deleting credential authorizations is available to organizations with GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products). - * - * An authenticated organization owner with the `admin:org` scope can remove a credential authorization for an organization that uses SAML SSO. Once you remove someone's credential authorization, they will need to create a new personal access token or SSH key and authorize it for the organization they want to access. - */ - "orgs/remove-saml-sso-authorization": { - parameters: { - path: { - org: components["parameters"]["org"]; - credential_id: number; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - "activity/list-public-org-events": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - }; - }; - /** The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. */ - "orgs/list-failed-invitations": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["organization-invitation"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "orgs/list-webhooks": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["org-hook"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Here's how you can create a hook that posts payloads in JSON format: */ - "orgs/create-webhook": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["org-hook"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Must be passed as "web". */ - name: string; - /** Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#create-hook-config-params). */ - config: { - url: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - username?: string; - password?: string; - }; - /** Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. */ - events?: string[]; - /** Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. */ - active?: boolean; - }; - }; - }; - }; - /** Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." */ - "orgs/get-webhook": { - parameters: { - path: { - org: components["parameters"]["org"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["org-hook"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "orgs/delete-webhook": { - parameters: { - path: { - org: components["parameters"]["org"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." */ - "orgs/update-webhook": { - parameters: { - path: { - org: components["parameters"]["org"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["org-hook"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#update-hook-config-params). 
*/ - config?: { - url: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - }; - /** Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. */ - events?: string[]; - /** Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. */ - active?: boolean; - name?: string; - }; - }; - }; - }; - /** - * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." - * - * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. - */ - "orgs/get-webhook-config-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["webhook-config"]; - }; - }; - }; - }; - /** - * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." - * - * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. - */ - "orgs/update-webhook-config-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["webhook-config"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - url?: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - }; - }; - }; - }; - /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ - "orgs/ping-webhook": { - parameters: { - path: { - org: components["parameters"]["org"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Enables an authenticated GitHub App to find the organization's installation information. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/get-org-installation": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["installation"]; - }; - }; - }; - }; - /** Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. */ - "orgs/list-app-installations": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). 
*/ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - installations: components["schemas"]["installation"][]; - }; - }; - }; - }; - }; - /** Shows which type of GitHub user can interact with this organization and when the restriction expires. If there is no restrictions, you will see an empty response. */ - "interactions/get-restrictions-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": Partial & Partial<{ - [key: string]: any; - }>; - }; - }; - }; - }; - /** Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. */ - "interactions/set-restrictions-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["interaction-limit-response"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": components["schemas"]["interaction-limit"]; - }; - }; - }; - /** Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. */ - "interactions/remove-restrictions-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. */ - "orgs/list-pending-invitations": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["organization-invitation"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. 
- */ - "orgs/create-invitation": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["organization-invitation"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** **Required unless you provide `email`**. GitHub user ID for the person you are inviting. */ - invitee_id?: number; - /** **Required unless you provide `invitee_id`**. Email address of the person you are inviting, which can be an existing GitHub user. */ - email?: string; - /** - * Specify role for new member. Can be one of: - * \* `admin` - Organization owners with full administrative rights to the organization and complete access to all repositories and teams. - * \* `direct_member` - Non-owner organization members with ability to see other members and join teams by invitation. - * \* `billing_manager` - Non-owner organization members with ability to manage the billing settings of your organization. - */ - role?: "admin" | "direct_member" | "billing_manager"; - /** Specify IDs for the teams you want to invite new members to. */ - team_ids?: number[]; - }; - }; - }; - }; - /** - * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). - */ - "orgs/cancel-invitation": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** invitation_id parameter */ - invitation_id: components["parameters"]["invitation_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. */ - "orgs/list-invitation-teams": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** invitation_id parameter */ - invitation_id: components["parameters"]["invitation_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * List issues in an organization assigned to the authenticated user. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - "issues/list-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** - * Indicates which sorts of issues to return. 
Can be one of: - * \* `assigned`: Issues assigned to you - * \* `created`: Issues created by you - * \* `mentioned`: Issues mentioning you - * \* `subscribed`: Issues you're subscribed to updates for - * \* `all`: All issues the authenticated user can see, regardless of participation or creation - */ - filter?: "assigned" | "created" | "mentioned" | "subscribed" | "repos" | "all"; - /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ - state?: "open" | "closed" | "all"; - /** A list of comma separated label names. Example: `bug,ui,@high` */ - labels?: components["parameters"]["labels"]; - /** What to sort results by. Can be either `created`, `updated`, `comments`. */ - sort?: "created" | "updated" | "comments"; - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. */ - "orgs/list-members": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** - * Filter members returned in the list. Can be one of: - * \* `2fa_disabled` - Members without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled. Available for organization owners. - * \* `all` - All members the authenticated user can see. - */ - filter?: "2fa_disabled" | "all"; - /** - * Filter members returned by their role. Can be one of: - * \* `all` - All members of the organization, regardless of role. - * \* `admin` - Organization owners. - * \* `member` - Non-owner organization members. - */ - role?: "all" | "admin" | "member"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - /** Response if requester is not an organization member */ - 302: never; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Check if a user is, publicly or privately, a member of the organization. */ - "orgs/check-membership-for-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response if requester is an organization member and user is a member */ - 204: never; - /** Response if requester is not an organization member */ - 302: never; - /** Not Found if requester is an organization member and user is not a member */ - 404: unknown; - }; - }; - /** Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. 
*/ - "orgs/remove-member": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - }; - }; - /** In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. */ - "orgs/get-membership-for-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["org-membership"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Only authenticated organization owners can add a member to the organization or update the member's role. - * - * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. - * - * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. - * - * **Rate limits** - * - * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. - */ - "orgs/set-membership-for-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["org-membership"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** - * The role to give the user in the organization. Can be one of: - * \* `admin` - The user will become an owner of the organization. - * \* `member` - The user will become a non-owner member of the organization. - */ - role?: "admin" | "member"; - }; - }; - }; - }; - /** - * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. - * - * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. - */ - "orgs/remove-membership-for-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists the most recent migrations. */ - "migrations/list-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). 
*/ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Exclude attributes from the API response to improve performance */ - exclude?: "repositories"[]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["migration"][]; - }; - }; - }; - }; - /** Initiates the generation of a migration archive. */ - "migrations/start-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["migration"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** A list of arrays indicating which repositories should be migrated. */ - repositories: string[]; - /** Indicates whether repositories should be locked (to prevent manipulation) while migrating data. */ - lock_repositories?: boolean; - /** Indicates whether attachments should be excluded from the migration (to reduce migration archive file size). */ - exclude_attachments?: boolean; - exclude?: "repositories"[]; - }; - }; - }; - }; - /** - * Fetches the status of a migration. - * - * The `state` of a migration can be one of the following values: - * - * * `pending`, which means the migration hasn't started yet. - * * `exporting`, which means the migration is in progress. - * * `exported`, which means the migration finished successfully. - * * `failed`, which means the migration failed. - */ - "migrations/get-status-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - query: { - /** Exclude attributes from the API response to improve performance */ - exclude?: "repositories"[]; - }; - }; - responses: { - /** - * * `pending`, which means the migration hasn't started yet. - * * `exporting`, which means the migration is in progress. - * * `exported`, which means the migration finished successfully. - * * `failed`, which means the migration failed. - */ - 200: { - content: { - "application/json": components["schemas"]["migration"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Fetches the URL to a migration archive. */ - "migrations/download-archive-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - }; - responses: { - /** Response */ - 302: never; - 404: components["responses"]["not_found"]; - }; - }; - /** Deletes a previous migration archive. Migration archives are automatically deleted after seven days. */ - "migrations/delete-archive-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. 
*/ - "migrations/unlock-repo-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - /** repo_name parameter */ - repo_name: components["parameters"]["repo_name"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** List all the repositories for this organization migration. */ - "migrations/list-repos-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** List all users who are outside collaborators of an organization. */ - "orgs/list-outside-collaborators": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** - * Filter the list of outside collaborators. Can be one of: - * \* `2fa_disabled`: Outside collaborators without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled. - * \* `all`: All outside collaborators. - */ - filter?: "2fa_disabled" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - }; - }; - /** When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://help.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". */ - "orgs/convert-member-to-outside-collaborator": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** User is getting converted asynchronously */ - 202: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - /** User was converted */ - 204: never; - /** Forbidden if user is the last owner of the organization or not a member of the organization */ - 403: unknown; - 404: components["responses"]["not_found"]; - }; - }; - /** Removing a user from this list will remove them from all the organization's repositories. */ - "orgs/remove-outside-collaborator": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** Unprocessable Entity if user is a member of the organization */ - 422: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - }; - }; - }; - }; - }; - /** - * Gets a specific package in an organization. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. 
- * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-package-for-organization": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package"]; - }; - }; - }; - }; - /** - * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. - */ - "packages/delete-package-for-org": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Restores an entire package in an organization. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. - */ - "packages/restore-package-for-org": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. 
*/ - package_name: components["parameters"]["package_name"]; - org: components["parameters"]["org"]; - }; - query: { - /** package token */ - token?: string; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Returns all package versions for a package owned by an organization. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-all-package-versions-for-package-owned-by-org": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - org: components["parameters"]["org"]; - }; - query: { - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** The state of the package, either active or deleted. */ - state?: "active" | "deleted"; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package-version"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Gets a specific package version in an organization. - * - * You must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-package-version-for-organization": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - org: components["parameters"]["org"]; - /** Unique identifier of the package version. */ - package_version_id: components["parameters"]["package_version_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package-version"]; - }; - }; - }; - }; - /** - * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. - */ - "packages/delete-package-version-for-org": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. 
For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - org: components["parameters"]["org"]; - /** Unique identifier of the package version. */ - package_version_id: components["parameters"]["package_version_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Restores a specific package version in an organization. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scope. In addition: - * - If `package_type` is not `container`, your token must also include the `repo` scope. - * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. - */ - "packages/restore-package-version-for-org": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - org: components["parameters"]["org"]; - /** Unique identifier of the package version. */ - package_version_id: components["parameters"]["package_version_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - "projects/list-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`. */ - state?: "open" | "closed" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["project"][]; - }; - }; - 422: components["responses"]["validation_failed_simple"]; - }; - }; - /** Creates an organization project board. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
*/ - "projects/create-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["project"]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the project. */ - name: string; - /** The description of the project. */ - body?: string; - }; - }; - }; - }; - /** Members of an organization can choose to have their membership publicized or not. */ - "orgs/list-public-members": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - }; - }; - "orgs/check-public-membership-for-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response if user is a public member */ - 204: never; - /** Not Found if user is not a public member */ - 404: unknown; - }; - }; - /** - * The user can publicize their own membership. (A user cannot publicize the membership for another user.) - * - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - "orgs/set-public-membership-for-authenticated-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - }; - }; - "orgs/remove-public-membership-for-authenticated-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Lists repositories for the specified organization. */ - "repos/list-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Specifies the types of repositories you want returned. Can be one of `all`, `public`, `private`, `forks`, `sources`, `member`, `internal`. Note: For GitHub AE, can be one of `all`, `private`, `forks`, `sources`, `member`, `internal`. Default: `all`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `type` can also be `internal`. */ - type?: "all" | "public" | "private" | "forks" | "sources" | "member" | "internal"; - /** Can be one of `created`, `updated`, `pushed`, `full_name`. */ - sort?: "created" | "updated" | "pushed" | "full_name"; - /** Can be one of `asc` or `desc`. Default: when using `full_name`: `asc`, otherwise `desc` */ - direction?: "asc" | "desc"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - }; - }; - /** - * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository - */ - "repos/create-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["repository"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the repository. */ - name: string; - /** A short description of the repository. */ - description?: string; - /** A URL with more information about the repository. */ - homepage?: string; - /** Whether the repository is private. */ - private?: boolean; - /** - * Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`. Note: For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. For more information, see "[Creating an internal repository](https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-repository-visibility#about-internal-repositories)" in the GitHub Help documentation. - * The `visibility` parameter overrides the `private` parameter when you use both parameters with the `nebula-preview` preview header. - */ - visibility?: "public" | "private" | "visibility" | "internal"; - /** Either `true` to enable issues for this repository or `false` to disable them. */ - has_issues?: boolean; - /** Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. */ - has_projects?: boolean; - /** Either `true` to enable the wiki for this repository or `false` to disable it. */ - has_wiki?: boolean; - /** Either `true` to make this repo available as a template repository or `false` to prevent it. */ - is_template?: boolean; - /** The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. */ - team_id?: number; - /** Pass `true` to create an initial commit with empty README. */ - auto_init?: boolean; - /** Desired language or platform [.gitignore template](https://github.com/github/gitignore) to apply. Use the name of the template without the extension. For example, "Haskell". 
*/ - gitignore_template?: string; - /** Choose an [open source license template](https://choosealicense.com/) that best suits your needs, and then use the [license keyword](https://help.github.com/articles/licensing-a-repository/#searching-github-by-license-type) as the `license_template` string. For example, "mit" or "mpl-2.0". */ - license_template?: string; - /** Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. */ - allow_squash_merge?: boolean; - /** Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. */ - allow_merge_commit?: boolean; - /** Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. */ - allow_rebase_merge?: boolean; - /** Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. */ - delete_branch_on_merge?: boolean; - }; - }; - }; - }; - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - "billing/get-github-actions-billing-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-billing-usage"]; - }; - }; - }; - }; - /** - * Gets the free and paid storage used for GitHub Packages in gigabytes. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - "billing/get-github-packages-billing-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["packages-billing-usage"]; - }; - }; - }; - }; - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `repo` or `admin:org` scope. - */ - "billing/get-shared-storage-billing-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["combined-billing-usage"]; - }; - }; - }; - }; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. 
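The `repos/create-in-org` request body spelled out above maps directly onto a call like the following sketch (organization and repository names are placeholders, and the route assumes the standard `POST /orgs/{org}/repos` path). Per the OAuth note above, the token needs `repo` scope for a private repository, or `public_repo`/`repo` for a public one.

import * as github from "@actions/github";

async function createOrgRepo(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data: repo } = await octokit.request("POST /orgs/{org}/repos", {
    org: "octo-org",
    name: "hello-world",
    description: "Example repository created from the generated endpoint types",
    private: true,
    has_issues: true,
    auto_init: true,              // initial commit with an empty README
    gitignore_template: "Node",   // template name without the extension
    license_template: "mit",
    delete_branch_on_merge: true,
  });
  console.log(`Created ${repo.full_name} (id ${repo.id})`);
}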
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * List IdP groups available in an organization. You can limit your page results using the `per_page` parameter. GitHub generates a url-encoded `page` token using a cursor value for where the next page begins. For more information on cursor pagination, see "[Offset and Cursor Pagination explained](https://dev.to/jackmarchant/offset-and-cursor-pagination-explained-b89)." - * - * The `per_page` parameter provides pagination for a list of IdP groups the authenticated user can access in an organization. For example, if the user `octocat` wants to see two groups per page in `octo-org` via cURL, it would look like this: - */ - "teams/list-idp-groups-for-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page token */ - page?: string; - }; - }; - responses: { - /** Response */ - 200: { - headers: { - Link?: string; - }; - content: { - "application/json": components["schemas"]["group-mapping"]; - }; - }; - }; - }; - /** Lists all teams in an organization that are visible to the authenticated user. */ - "teams/list": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://help.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." - * - * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". - */ - "teams/create": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["team-full"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the team. */ - name: string; - /** The description of the team. */ - description?: string; - /** List GitHub IDs for organization members who will become team maintainers. */ - maintainers?: string[]; - /** The full name (e.g., "organization-name/repository-name") of repositories to add the team to. */ - repo_names?: string[]; - /** - * The level of privacy this team should have. The options are: - * **For a non-nested team:** - * \* `secret` - only visible to organization owners and members of this team. - * \* `closed` - visible to all members of this organization. - * Default: `secret` - * **For a parent or child team:** - * \* `closed` - visible to all members of this organization. 
- * Default for child team: `closed` - */ - privacy?: "secret" | "closed"; - /** - * **Deprecated**. The permission that new repositories will be added to the team with when none is specified. Can be one of: - * \* `pull` - team members can pull, but not push to or administer newly-added repositories. - * \* `push` - team members can pull and push, but not administer newly-added repositories. - * \* `admin` - team members can pull, push and administer newly-added repositories. - */ - permission?: "pull" | "push" | "admin"; - /** The ID of a team to set as the parent team. */ - parent_team_id?: number; - }; - }; - }; - }; - /** - * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. - */ - "teams/get-by-name": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-full"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * To delete a team, the authenticated user must be an organization owner or team maintainer. - * - * If you are an organization owner, deleting a parent team will delete all of its child teams as well. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. - */ - "teams/delete-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * To edit a team, the authenticated user must either be an organization owner or a team maintainer. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. - */ - "teams/update-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["team-full"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The name of the team. */ - name?: string; - /** The description of the team. */ - description?: string; - /** - * The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. When a team is nested, the `privacy` for parent teams cannot be `secret`. The options are: - * **For a non-nested team:** - * \* `secret` - only visible to organization owners and members of this team. - * \* `closed` - visible to all members of this organization. - * **For a parent or child team:** - * \* `closed` - visible to all members of this organization. - */ - privacy?: "secret" | "closed"; - /** - * **Deprecated**. The permission that new repositories will be added to the team with when none is specified. Can be one of: - * \* `pull` - team members can pull, but not push to or administer newly-added repositories. - * \* `push` - team members can pull and push, but not administer newly-added repositories. - * \* `admin` - team members can pull, push and administer newly-added repositories. 
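The `teams/create` body above (name, maintainers, repo_names, privacy, parent_team_id) can be exercised with a call such as this sketch; every identifier is invented, and the authenticated user becomes a team maintainer automatically, so the maintainers array is only extra seeding.

import * as github from "@actions/github";

async function createTeam(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data: team } = await octokit.request("POST /orgs/{org}/teams", {
    org: "octo-org",
    name: "release-engineering",
    description: "Owns the build-for-release workflow",
    maintainers: ["octocat"],
    repo_names: ["octo-org/hello-world"],
    privacy: "closed", // visible to all organization members
  });
  console.log(`Created team ${team.slug} (id ${team.id})`);
}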
- */ - permission?: "pull" | "push" | "admin"; - /** The ID of a team to set as the parent team. */ - parent_team_id?: number | null; - }; - }; - }; - }; - /** - * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. - */ - "teams/list-discussions-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - query: { - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Pinned discussions only filter */ - pinned?: string; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team-discussion"][]; - }; - }; - }; - }; - /** - * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. - */ - "teams/create-discussion-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["team-discussion"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion post's title. */ - title: string; - /** The discussion post's body text. */ - body: string; - /** Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post. */ - private?: boolean; - }; - }; - }; - }; - /** - * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. 
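A short sketch of `teams/create-discussion-in-org` as described above; because the endpoint triggers notifications and is subject to abuse rate limits, a real integration should space out calls. Names and the token source are placeholders.

import * as github from "@actions/github";

// Requires write:discussion; set `private: true` for a members-only post.
async function postTeamDiscussion(): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data: discussion } = await octokit.request(
    "POST /orgs/{org}/teams/{team_slug}/discussions",
    {
      org: "octo-org",
      team_slug: "release-engineering",
      title: "Release checklist",
      body: "Please review the notes for the next release build.",
      private: false,
    }
  );
  console.log(`Created discussion #${discussion.number}`);
}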
- */ - "teams/get-discussion-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion"]; - }; - }; - }; - }; - /** - * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. - */ - "teams/delete-discussion-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. - */ - "teams/update-discussion-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion post's title. */ - title?: string; - /** The discussion post's body text. */ - body?: string; - }; - }; - }; - }; - /** - * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. - */ - "teams/list-discussion-comments-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - query: { - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team-discussion-comment"][]; - }; - }; - }; - }; - /** - * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
- * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. - */ - "teams/create-discussion-comment-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["team-discussion-comment"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion comment's body text. */ - body: string; - }; - }; - }; - }; - /** - * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - "teams/get-discussion-comment-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion-comment"]; - }; - }; - }; - }; - /** - * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. - */ - "teams/delete-discussion-comment-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. 
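`teams/create-discussion-comment-in-org` takes only a `body`, so the corresponding call is short; the same abuse-rate-limit caveat applies. A hedged sketch with placeholder names:

import * as github from "@actions/github";

// Requires write:discussion, as noted in the description above.
async function commentOnDiscussion(discussionNumber: number): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  const { data: comment } = await octokit.request(
    "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments",
    {
      org: "octo-org",
      team_slug: "release-engineering",
      discussion_number: discussionNumber,
      body: "The release build is green.",
    }
  );
  console.log(`Added comment #${comment.number}`);
}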
- */ - "teams/update-discussion-comment-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion-comment"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion comment's body text. */ - body: string; - }; - }; - }; - }; - /** - * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. - */ - "reactions/list-for-team-discussion-comment-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - }; - }; - /** - * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. - */ - "reactions/create-for-team-discussion-comment-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. 
*/ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. - * - * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "reactions/delete-for-team-discussion-comment": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - reaction_id: components["parameters"]["reaction-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. - */ - "reactions/list-for-team-discussion-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - }; - }; - /** - * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. 
- */ - "reactions/create-for-team-discussion-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. - * - * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "reactions/delete-for-team-discussion": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - discussion_number: components["parameters"]["discussion-number"]; - reaction_id: components["parameters"]["reaction-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. - */ - "teams/list-pending-invitations-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["organization-invitation"][]; - }; - }; - }; - }; - /** - * Team members will include the members of child teams. - * - * To list members in a team, the team must be visible to the authenticated user. - */ - "teams/list-members-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - query: { - /** - * Filters members returned by their role in the team. Can be one of: - * \* `member` - normal members of the team. - * \* `maintainer` - team maintainers. - * \* `all` - all members of the team. - */ - role?: "member" | "maintainer" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - }; - }; - /** - * Team members will include the members of child teams. - * - * To get a user's membership with a team, the team must be visible to the authenticated user. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. - * - * **Note:** - * The response contains the `state` of the membership and the member's `role`. - * - * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). - */ - "teams/get-membership-for-user-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-membership"]; - }; - }; - /** if user has no team membership */ - 404: unknown; - }; - }; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. - * - * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. 
- */ - "teams/add-or-update-membership-for-user-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-membership"]; - }; - }; - /** Forbidden if team synchronization is set up */ - 403: unknown; - /** Unprocessable Entity if you attempt to add an organization to a team */ - 422: unknown; - }; - requestBody: { - content: { - "application/json": { - /** - * The role that this user should have in the team. Can be one of: - * \* `member` - a normal member of the team. - * \* `maintainer` - a team maintainer. Able to add/remove other team members, promote other team members to team maintainer, and edit the team's name and description. - */ - role?: "member" | "maintainer"; - }; - }; - }; - }; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. - */ - "teams/remove-membership-for-user-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** Forbidden if team synchronization is set up */ - 403: unknown; - }; - }; - /** - * Lists the organization projects for a team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. - */ - "teams/list-projects-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team-project"][]; - }; - }; - }; - }; - /** - * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. 
The response includes projects inherited from a parent team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - "teams/check-permissions-for-project-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-project"]; - }; - }; - /** Not Found if project is not managed by this team */ - 404: unknown; - }; - }; - /** - * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - "teams/add-or-update-project-permissions-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** Forbidden if the project is not owned by the organization */ - 403: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - }; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** - * The permission to grant to the team for this project. Can be one of: - * \* `read` - team members can read, but not write to or administer this project. - * \* `write` - team members can read and write, but not administer this project. - * \* `admin` - team members can read, write and administer this project. - * Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - permission?: "read" | "write" | "admin"; - } | null; - }; - }; - }; - /** - * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. - */ - "teams/remove-project-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Lists a team's repositories visible to the authenticated user. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. 
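The project-permission grant described above accepts an optional body; as its note says, sending no parameters at all requires a `Content-Length: 0` header. A hedged sketch that does send a permission, with an invented project id:

import * as github from "@actions/github";

async function grantTeamProjectAccess(projectId: number): Promise<void> {
  const octokit = github.getOctokit(process.env.GITHUB_TOKEN ?? "");
  await octokit.request(
    "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}",
    {
      org: "octo-org",
      team_slug: "release-engineering",
      project_id: projectId,
      permission: "write", // read | write | admin; omit to fall back to the team default
    }
  );
  // Success is an empty 204; a 403 body explains that the project is not owned by the organization.
}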
- */ - "teams/list-repos-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - }; - }; - /** - * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. - * - * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. - * - * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - */ - "teams/check-permissions-for-repo-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Alternative response with repository permissions */ - 200: { - content: { - "application/json": components["schemas"]["team-repository"]; - }; - }; - /** Response if team has permission for the repository. This is the response when the repository media type hasn't been provded in the Accept header. */ - 204: never; - /** Not Found if team does not have permission for the repository */ - 404: unknown; - }; - }; - /** - * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - * - * For more information about the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". 
- */ - "teams/add-or-update-repo-permissions-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** - * The permission to grant the team on this repository. Can be one of: - * \* `pull` - team members can pull, but not push to or administer this repository. - * \* `push` - team members can pull and push, but not administer this repository. - * \* `admin` - team members can pull, push and administer this repository. - * \* `maintain` - team members can manage the repository without access to sensitive or destructive actions. Recommended for project managers. Only applies to repositories owned by organizations. - * \* `triage` - team members can proactively manage issues and pull requests without write access. Recommended for contributors who triage a repository. Only applies to repositories owned by organizations. - * - * If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. - */ - permission?: "pull" | "push" | "admin" | "maintain" | "triage"; - }; - }; - }; - }; - /** - * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. - */ - "teams/remove-repo-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * List IdP groups connected to a team on GitHub. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. - */ - "teams/list-idp-groups-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["group-mapping"]; - }; - }; - }; - }; - /** - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. 
Specifying an empty `groups` array will remove all connections for a team. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/team-sync/group-mappings`. - */ - "teams/create-or-update-idp-group-connections-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["group-mapping"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The IdP groups you want to connect to a GitHub team. When updating, the new `groups` object will replace the original one. You must include any existing groups that you don't want to remove. */ - groups?: { - /** ID of the IdP group. */ - group_id: string; - /** Name of the IdP group. */ - group_name: string; - /** Description of the IdP group. */ - group_description: string; - }[]; - }; - }; - }; - }; - /** - * Lists the child teams of the team specified by `{team_slug}`. - * - * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. - */ - "teams/list-child-in-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** team_slug parameter */ - team_slug: components["parameters"]["team_slug"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** if child teams exist */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - }; - }; - "projects/get-card": { - parameters: { - path: { - /** card_id parameter */ - card_id: components["parameters"]["card_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["project-card"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "projects/delete-card": { - parameters: { - path: { - /** card_id parameter */ - card_id: components["parameters"]["card_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - /** Forbidden */ - 403: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - errors?: string[]; - }; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "projects/update-card": { - parameters: { - path: { - /** card_id parameter */ - card_id: components["parameters"]["card_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["project-card"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The project card's note */ - note?: string | null; - /** Whether or not the card is archived */ - archived?: boolean; - }; - }; - }; - }; - "projects/move-card": { - parameters: { - path: { 
- /** card_id parameter */ - card_id: components["parameters"]["card_id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - /** Forbidden */ - 403: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - errors?: { - code?: string; - message?: string; - resource?: string; - field?: string; - }[]; - }; - }; - }; - 422: components["responses"]["validation_failed"]; - /** Response */ - 503: { - content: { - "application/json": { - code?: string; - message?: string; - documentation_url?: string; - errors?: { - code?: string; - message?: string; - }[]; - }; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The position of the card in a column */ - position: string; - /** The unique identifier of the column the card should be moved to */ - column_id?: number; - }; - }; - }; - }; - "projects/get-column": { - parameters: { - path: { - /** column_id parameter */ - column_id: components["parameters"]["column_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["project-column"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "projects/delete-column": { - parameters: { - path: { - /** column_id parameter */ - column_id: components["parameters"]["column_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - "projects/update-column": { - parameters: { - path: { - /** column_id parameter */ - column_id: components["parameters"]["column_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["project-column"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - requestBody: { - content: { - "application/json": { - /** Name of the project column */ - name: string; - }; - }; - }; - }; - "projects/list-cards": { - parameters: { - path: { - /** column_id parameter */ - column_id: components["parameters"]["column_id"]; - }; - query: { - /** Filters the project cards that are returned by the card's state. Can be one of `all`,`archived`, or `not_archived`. */ - archived_state?: "all" | "archived" | "not_archived"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["project-card"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by the `pull_request` key. 
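The `projects/update-card` and `projects/move-card` operations above are only type declarations; a minimal usage sketch (assuming an `@octokit/rest` client, the plugin's usual operation-ID-to-method naming, a `GITHUB_TOKEN` environment variable, and hypothetical card/column ids) could look like:

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function archiveAndMoveCard(): Promise<void> {
  // projects/update-card: both `note` and `archived` are optional body fields.
  await octokit.rest.projects.updateCard({ card_id: 42, archived: true });

  // projects/move-card: `position` is a string such as "top" or "after:<card_id>";
  // `column_id` is only needed when moving the card to another column.
  await octokit.rest.projects.moveCard({
    card_id: 42,
    position: "top",
    column_id: 367, // hypothetical target column
  });
}
```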
- * - * Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - "projects/create-card": { - parameters: { - path: { - /** column_id parameter */ - column_id: components["parameters"]["column_id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["project-card"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - /** Validation failed */ - 422: { - content: { - "application/json": components["schemas"]["validation-error"] | components["schemas"]["validation-error-simple"]; - }; - }; - /** Response */ - 503: { - content: { - "application/json": { - code?: string; - message?: string; - documentation_url?: string; - errors?: { - code?: string; - message?: string; - }[]; - }; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The project card's note */ - note: string | null; - } | { - /** The unique identifier of the content associated with the card */ - content_id: number; - /** The piece of content associated with the card */ - content_type: string; - }; - }; - }; - }; - "projects/move-column": { - parameters: { - path: { - /** column_id parameter */ - column_id: components["parameters"]["column_id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The position of the column in a project */ - position: string; - }; - }; - }; - }; - /** Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - "projects/get": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["project"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** Deletes a project board. Returns a `404 Not Found` status if projects are disabled. */ - "projects/delete": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Delete Success */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - /** Forbidden */ - 403: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - errors?: string[]; - }; - }; - }; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - }; - }; - /** Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
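For `projects/create-card`, the request body is either a plain `note` or a `content_id`/`content_type` pair, and the note above about pull requests being issues matters when picking `content_id`. A sketch under the same assumptions as the previous example (hypothetical ids, `@octokit/rest` method naming):

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function createCards(columnId: number, issueId: number): Promise<void> {
  // Note card: the body is just `{ note }`.
  await octokit.rest.projects.createCard({
    column_id: columnId,
    note: "Triage incoming bug reports",
  });

  // Content card: the body is `{ content_id, content_type }`. Here the card wraps an issue;
  // to wrap a pull request instead, use content_type "PullRequest" with the pull request id,
  // not the issue id returned by "Issues" endpoints (see the note above).
  await octokit.rest.projects.createCard({
    column_id: columnId,
    content_id: issueId,
    content_type: "Issue",
  });
}
```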
*/ - "projects/update": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["project"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - /** Forbidden */ - 403: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - errors?: string[]; - }; - }; - }; - /** Not Found if the authenticated user does not have access to the project */ - 404: unknown; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** Name of the project */ - name?: string; - /** Body of the project */ - body?: string | null; - /** State of the project; either 'open' or 'closed' */ - state?: string; - /** The baseline permission that all organization members have on this project */ - organization_permission?: "read" | "write" | "admin" | "none"; - /** Whether or not this project can be seen by everyone. */ - private?: boolean; - }; - }; - }; - }; - /** Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. */ - "projects/list-collaborators": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - }; - query: { - /** - * Filters the collaborators by their affiliation. Can be one of: - * \* `outside`: Outside collaborators of a project that are not a member of the project's organization. - * \* `direct`: Collaborators with permissions to a project, regardless of organization membership status. - * \* `all`: All collaborators the authenticated user can see. - */ - affiliation?: "outside" | "direct" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. 
*/ - "projects/add-collaborator": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The permission to grant the collaborator. */ - permission?: "read" | "write" | "admin"; - } | null; - }; - }; - }; - /** Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. */ - "projects/remove-collaborator": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. */ - "projects/get-permission-for-user": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["repository-collaborator-permission"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "projects/list-columns": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
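To make the `projects/add-collaborator` and `projects/get-permission-for-user` shapes concrete, here is a hedged sketch (same assumptions about the client and method naming; the `permission` values come from the declarations above):

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function manageProjectCollaborator(projectId: number, username: string): Promise<void> {
  // projects/add-collaborator: the JSON body is optional; `permission` is read | write | admin.
  await octokit.rest.projects.addCollaborator({
    project_id: projectId,
    username,
    permission: "read",
  });

  // projects/get-permission-for-user: `permission` comes back as admin, write, read, or none.
  const { data } = await octokit.rest.projects.getPermissionForUser({
    project_id: projectId,
    username,
  });
  console.log(`${username} has ${data.permission} access`);
}
```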
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["project-column"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - "projects/create-column": { - parameters: { - path: { - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["project-column"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** Name of the project column */ - name: string; - }; - }; - }; - }; - /** - * **Note:** Accessing this endpoint does not count against your REST API rate limit. - * - * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. - */ - "rate-limit/get": { - parameters: {}; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["rate-limit-overview"]; - }; - }; - 304: components["responses"]["not_modified"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Reactions API. We recommend migrating your existing code to use the new delete reactions endpoints. For more information, see this [blog post](https://developer.github.com/changes/2020-02-26-new-delete-reactions-endpoints/). - * - * OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), when deleting a [team discussion](https://docs.github.com/rest/reference/teams#discussions) or [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). - */ - "reactions/delete-legacy": { - parameters: { - path: { - reaction_id: components["parameters"]["reaction-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 410: components["responses"]["gone"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * When you pass the `scarlet-witch-preview` media type, requests to get a repository will also return the repository's code of conduct if it can be detected from the repository's code of conduct file. - * - * The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. 
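The `rate-limit/get` note above (prefer `core` over the deprecated `rate` object) translates directly into client code; a minimal sketch, assuming an `@octokit/rest` client and the `rate-limit-overview` shape referenced above:

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function checkRateLimit(): Promise<void> {
  // rate-limit/get: this request does not count against the REST API rate limit.
  const { data } = await octokit.rest.rateLimit.get();

  // Read resources.core rather than the deprecated top-level `rate` object.
  const { limit, remaining, reset } = data.resources.core;
  console.log(`core: ${remaining}/${limit}, resets ${new Date(reset * 1000).toISOString()}`);
}
```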
- */ - "repos/get": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["full-repository"]; - }; - }; - 301: components["responses"]["moved_permanently"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. - * - * If an organization owner has configured the organization to prevent members from deleting organization-owned - * repositories, you will get a `403 Forbidden` response. - */ - "repos/delete": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** If an organization owner has configured the organization to prevent members from deleting organization-owned repositories, a member will get this response: */ - 403: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - }; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. */ - "repos/update": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["full-repository"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the repository. */ - name?: string; - /** A short description of the repository. */ - description?: string; - /** A URL with more information about the repository. */ - homepage?: string; - /** - * Either `true` to make the repository private or `false` to make it public. Default: `false`. - * **Note**: You will get a `422` error if the organization restricts [changing repository visibility](https://help.github.com/articles/repository-permission-levels-for-an-organization#changing-the-visibility-of-repositories) to organization owners and a non-owner tries to change the value of private. - */ - private?: boolean; - /** Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`. The `visibility` parameter overrides the `private` parameter when you use both along with the `nebula-preview` preview header. */ - visibility?: "public" | "private" | "internal"; - /** Either `true` to enable issues for this repository or `false` to disable them. */ - has_issues?: boolean; - /** Either `true` to enable projects for this repository or `false` to disable them. 
**Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. */ - has_projects?: boolean; - /** Either `true` to enable the wiki for this repository or `false` to disable it. */ - has_wiki?: boolean; - /** Either `true` to make this repo available as a template repository or `false` to prevent it. */ - is_template?: boolean; - /** Updates the default branch for this repository. */ - default_branch?: string; - /** Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. */ - allow_squash_merge?: boolean; - /** Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. */ - allow_merge_commit?: boolean; - /** Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. */ - allow_rebase_merge?: boolean; - /** Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. */ - delete_branch_on_merge?: boolean; - /** `true` to archive this repository. **Note**: You cannot unarchive repositories through the API. */ - archived?: boolean; - }; - }; - }; - }; - /** Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/list-artifacts-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - artifacts: components["schemas"]["artifact"][]; - }; - }; - }; - }; - }; - /** Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/get-artifact": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** artifact_id parameter */ - artifact_id: components["parameters"]["artifact_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["artifact"]; - }; - }; - }; - }; - /** Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - "actions/delete-artifact": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** artifact_id parameter */ - artifact_id: components["parameters"]["artifact_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in - * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to - * the repository can use this endpoint. 
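Since every field in the `repos/update` request body above is optional, a call only needs the settings being changed. A sketch under the usual assumptions (hypothetical repository, `@octokit/rest` method naming):

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// repos/update: send only the fields you want to change.
async function tightenRepoSettings(owner: string, repo: string): Promise<void> {
  await octokit.rest.repos.update({
    owner,
    repo,
    description: "Internal tooling",
    has_wiki: false,
    delete_branch_on_merge: true,
    allow_rebase_merge: false,
    // `private` / `visibility` are omitted here: per the note above, a 422 is returned when
    // the organization restricts visibility changes to owners and the caller is not an owner.
  });
}
```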
If the repository is private you must use an access token with the `repo` scope. - * GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - "actions/download-artifact": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** artifact_id parameter */ - artifact_id: components["parameters"]["artifact_id"]; - archive_format: string; - }; - }; - responses: { - /** Response */ - 302: never; - }; - }; - /** Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/get-job-for-workflow-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** job_id parameter */ - job_id: components["parameters"]["job_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["job"]; - }; - }; - }; - }; - /** - * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look - * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can - * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must - * have the `actions:read` permission to use this endpoint. - */ - "actions/download-job-logs-for-workflow-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** job_id parameter */ - job_id: components["parameters"]["job_id"]; - }; - }; - responses: { - /** Response */ - 302: never; - }; - }; - /** - * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions allowed to run in the repository. - * - * You must authenticate using an access token with the `repo` scope to use this - * endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - "actions/get-github-actions-permissions-repository": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-repository-permissions"]; - }; - }; - }; - }; - /** - * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions in the repository. - * - * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions, then you cannot override them for the repository. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. 
- */ - "actions/set-github-actions-permissions-repository": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - enabled: components["schemas"]["actions-enabled"]; - allowed_actions?: components["schemas"]["allowed-actions"]; - }; - }; - }; - }; - /** - * Gets the settings for selected actions that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - "actions/get-allowed-actions-repository": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["selected-actions"]; - }; - }; - }; - }; - /** - * Sets the actions that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." - * - * If the repository belongs to an organization or enterprise that has `selected` actions set at the organization or enterprise levels, then you cannot override any of the allowed actions settings. - * - * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. - */ - "actions/set-allowed-actions-repository": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": components["schemas"]["selected-actions"]; - }; - }; - }; - /** Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. */ - "actions/list-self-hosted-runners-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - runners: components["schemas"]["runner"][]; - }; - }; - }; - }; - }; - /** - * Lists binaries for the runner application that you can download and run. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. 
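The two write operations above (`actions/set-github-actions-permissions-repository` and `actions/set-allowed-actions-repository`) are typically used together. A sketch, assuming the `selected-actions` schema exposes `github_owned_allowed` and `patterns_allowed` (only `patterns_allowed` is named in the text above; the other field is an assumption):

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function restrictRepoActions(owner: string, repo: string): Promise<void> {
  // actions/set-github-actions-permissions-repository: enable Actions, allow only selected actions.
  await octokit.rest.actions.setGithubActionsPermissionsRepository({
    owner,
    repo,
    enabled: true,
    allowed_actions: "selected",
  });

  // actions/set-allowed-actions-repository: the body is the `selected-actions` schema.
  await octokit.rest.actions.setAllowedActionsRepository({
    owner,
    repo,
    github_owned_allowed: true,                // assumed schema field
    patterns_allowed: ["octo-org/*"],          // hypothetical pattern
  });
}
```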
- */ - "actions/list-runner-applications-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner-application"][]; - }; - }; - }; - }; - /** - * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate - * using an access token with the `repo` scope to use this endpoint. - * - * #### Example using registration token - * - * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. - * - * ``` - * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN - * ``` - */ - "actions/create-registration-token-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["authentication-token"]; - }; - }; - }; - }; - /** - * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. - * You must authenticate using an access token with the `repo` scope to use this endpoint. - * - * #### Example using remove token - * - * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. - * - * ``` - * ./config.sh remove --token TOKEN - * ``` - */ - "actions/create-remove-token-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["authentication-token"]; - }; - }; - }; - }; - /** - * Gets a specific self-hosted runner configured in a repository. - * - * You must authenticate using an access token with the `repo` scope to use this - * endpoint. - */ - "actions/get-self-hosted-runner-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["runner"]; - }; - }; - }; - }; - /** - * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. - * - * You must authenticate using an access token with the `repo` - * scope to use this endpoint. - */ - "actions/delete-self-hosted-runner-from-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** Unique identifier of the self-hosted runner. */ - runner_id: components["parameters"]["runner_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
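The `./config.sh` example above needs a registration token from `actions/create-registration-token-for-repo`; fetching one and printing the ready-to-run command might look like this (hypothetical repository, token taken from `GITHUB_TOKEN`):

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function printRunnerConfigCommand(owner: string, repo: string): Promise<void> {
  // actions/create-registration-token-for-repo: the returned token expires after one hour.
  const { data } = await octokit.rest.actions.createRegistrationTokenForRepo({ owner, repo });

  // Pass the token to the runner's config script, as in the example above.
  console.log(`./config.sh --url https://github.com/${owner}/${repo} --token ${data.token}`);
}
```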
- */ - "actions/list-workflow-runs-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ - actor?: components["parameters"]["actor"]; - /** Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ - branch?: components["parameters"]["workflow-run-branch"]; - /** Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ - event?: components["parameters"]["event"]; - /** Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. For a list of the possible `status` and `conclusion` options, see "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)." */ - status?: components["parameters"]["workflow-run-status"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - workflow_runs: components["schemas"]["workflow-run"][]; - }; - }; - }; - }; - }; - /** Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/get-workflow-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["workflow-run"]; - }; - }; - }; - }; - /** - * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is - * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use - * this endpoint. - */ - "actions/delete-workflow-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/get-reviews-for-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["environment-approvals"][]; - }; - }; - }; - }; - /** Lists artifacts for a workflow run. 
Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/list-workflow-run-artifacts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - artifacts: components["schemas"]["artifact"][]; - }; - }; - }; - }; - }; - /** Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - "actions/cancel-workflow-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 202: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - }; - }; - /** Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ - "actions/list-jobs-for-workflow-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - query: { - /** - * Filters jobs by their `completed_at` timestamp. Can be one of: - * \* `latest`: Returns jobs from the most recent execution of the workflow run. - * \* `all`: Returns all jobs for a workflow run, including from old executions of the workflow run. - */ - filter?: "latest" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - jobs: components["schemas"]["job"][]; - }; - }; - }; - }; - }; - /** - * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for - * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use - * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have - * the `actions:read` permission to use this endpoint. 
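Combining `actions/list-jobs-for-workflow-run` (with `filter: "latest"`) and `actions/cancel-workflow-run` gives a small "cancel if still running" helper; a sketch under the usual client assumptions:

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function cancelIfStillRunning(owner: string, repo: string, run_id: number): Promise<void> {
  // actions/list-jobs-for-workflow-run: "latest" ignores jobs from older executions of the run.
  const { data } = await octokit.rest.actions.listJobsForWorkflowRun({
    owner,
    repo,
    run_id,
    filter: "latest",
    per_page: 100,
  });

  if (data.jobs.some((job) => job.status !== "completed")) {
    // actions/cancel-workflow-run: the API answers 202 Accepted.
    await octokit.rest.actions.cancelWorkflowRun({ owner, repo, run_id });
  }
}
```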
- */ - "actions/download-workflow-run-logs": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 302: never; - }; - }; - /** Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - "actions/delete-workflow-run-logs": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Get all deployment environments for a workflow run that are waiting for protection rules to pass. - * - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - "actions/get-pending-deployments-for-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pending-deployment"][]; - }; - }; - }; - }; - /** - * Approve or reject pending deployments that are waiting on approval by a required reviewer. - * - * Anyone with read access to the repository contents and deployments can use this endpoint. - */ - "actions/review-pending-deployments-for-run": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["deployment"][]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The list of environment ids to approve or reject */ - environment_ids: number[]; - /** Whether to approve or reject deployment to the specified environments. Must be one of: `approved` or `rejected` */ - state: "approved" | "rejected"; - /** A comment to accompany the deployment review */ - comment: string; - }; - }; - }; - }; - /** Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ - "actions/re-run-workflow": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - }; - }; - /** - * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. 
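`actions/get-pending-deployments-for-run` pairs naturally with `actions/review-pending-deployments-for-run` above; the sketch below assumes the `pending-deployment` schema exposes `environment.id` and `current_user_can_approve` (neither field is spelled out in this excerpt):

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function approvePendingDeployments(owner: string, repo: string, run_id: number): Promise<void> {
  // actions/get-pending-deployments-for-run: environments still waiting on protection rules.
  const { data: pending } = await octokit.rest.actions.getPendingDeploymentsForRun({
    owner,
    repo,
    run_id,
  });

  const environment_ids = pending
    .filter((p) => p.current_user_can_approve)      // assumed schema field
    .map((p) => p.environment.id)
    .filter((id): id is number => id !== undefined);

  if (environment_ids.length === 0) return;

  // actions/review-pending-deployments-for-run: all three body fields are required.
  await octokit.rest.actions.reviewPendingDeploymentsForRun({
    owner,
    repo,
    run_id,
    environment_ids,
    state: "approved",
    comment: "Approved by release automation",
  });
}
```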
For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - "actions/get-workflow-run-usage": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The id of the workflow run */ - run_id: components["parameters"]["run-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["workflow-run-usage"]; - }; - }; - }; - }; - /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - "actions/list-repo-secrets": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - secrets: components["schemas"]["actions-secret"][]; - }; - }; - }; - }; - }; - /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - "actions/get-repo-public-key": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-public-key"]; - }; - }; - }; - }; - /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - "actions/get-repo-secret": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-secret"]; - }; - }; - }; - }; - /** - * Creates or updates a repository secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use - * this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. 
- * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. - * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. - * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - "actions/create-or-update-repo-secret": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response when creating a secret */ - 201: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - /** Response when updating a secret */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/actions#get-a-repository-public-key) endpoint. */ - encrypted_value?: string; - /** ID of the key you used to encrypt the secret. */ - key_id?: string; - }; - }; - }; - }; - /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. 
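The Node.js encryption example above stops at printing the sealed value; end to end, `actions/get-repo-public-key` and `actions/create-or-update-repo-secret` complete the flow. A sketch assuming `tweetsodium` (as in that example) and an `@octokit/rest` client; the default-import style for `tweetsodium` is an assumption and may need the CommonJS `require` form shown above:

```typescript
import { Octokit } from "@octokit/rest";
import sodium from "tweetsodium";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function setRepoSecret(owner: string, repo: string, secret_name: string, value: string): Promise<void> {
  // actions/get-repo-public-key: supplies both the sealing key and `key_id`.
  const { data: key } = await octokit.rest.actions.getRepoPublicKey({ owner, repo });

  // Seal the plaintext with LibSodium, as in the Node.js example above.
  const encryptedBytes = sodium.seal(Buffer.from(value), Buffer.from(key.key, "base64"));

  // actions/create-or-update-repo-secret: 201 when created, 204 when updated.
  await octokit.rest.actions.createOrUpdateRepoSecret({
    owner,
    repo,
    secret_name,
    encrypted_value: Buffer.from(encryptedBytes).toString("base64"),
    key_id: key.key_id,
  });
}
```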
*/ - "actions/delete-repo-secret": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/list-repo-workflows": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - workflows: components["schemas"]["workflow"][]; - }; - }; - }; - }; - }; - /** Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "actions/get-workflow": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the workflow. You can also pass the workflow file name as a string. */ - workflow_id: components["parameters"]["workflow-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["workflow"]; - }; - }; - }; - }; - /** - * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - "actions/disable-workflow": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the workflow. You can also pass the workflow file name as a string. */ - workflow_id: components["parameters"]["workflow-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. 
For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)." - */ - "actions/create-workflow-dispatch": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the workflow. You can also pass the workflow file name as a string. */ - workflow_id: components["parameters"]["workflow-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** The git reference for the workflow. The reference can be a branch or tag name. */ - ref: string; - /** Input keys and values configured in the workflow file. The maximum number of properties is 10. Any default properties configured in the workflow file will be used when `inputs` are omitted. */ - inputs?: { - [key: string]: string; - }; - }; - }; - }; - }; - /** - * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. - * - * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. - */ - "actions/enable-workflow": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the workflow. You can also pass the workflow file name as a string. */ - workflow_id: components["parameters"]["workflow-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). - * - * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. - */ - "actions/list-workflow-runs": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the workflow. You can also pass the workflow file name as a string. */ - workflow_id: components["parameters"]["workflow-id"]; - }; - query: { - /** Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ - actor?: components["parameters"]["actor"]; - /** Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ - branch?: components["parameters"]["workflow-run-branch"]; - /** Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ - event?: components["parameters"]["event"]; - /** Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. For a list of the possible `status` and `conclusion` options, see "[Create a check run](https://docs.github.com/rest/reference/checks#create-a-check-run)." 
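For `actions/create-workflow-dispatch` above, the only required body field is `ref`; `workflow_id` may be the numeric id or the workflow file name, and `inputs` must match what the workflow file declares. A sketch with a hypothetical `environment` input:

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function triggerDeploy(owner: string, repo: string): Promise<void> {
  // actions/create-workflow-dispatch: the workflow must be configured for `workflow_dispatch`.
  await octokit.rest.actions.createWorkflowDispatch({
    owner,
    repo,
    workflow_id: "main.yaml",           // file name or numeric workflow id
    ref: "main",                        // branch or tag to run against
    inputs: { environment: "staging" }, // hypothetical input defined in the workflow file
  });
}
```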
*/ - status?: components["parameters"]["workflow-run-status"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - workflow_runs: components["schemas"]["workflow-run"][]; - }; - }; - }; - }; - }; - /** - * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - "actions/get-workflow-usage": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the workflow. You can also pass the workflow file name as a string. */ - workflow_id: components["parameters"]["workflow-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["workflow-usage"]; - }; - }; - }; - }; - /** Lists the [available assignees](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. */ - "issues/list-assignees": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Checks if a user has permission to be assigned to an issue in this repository. - * - * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. - * - * Otherwise a `404` status code is returned. - */ - "issues/check-user-can-be-assigned": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - assignee: string; - }; - }; - responses: { - /** If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. */ - 204: never; - /** Otherwise a `404` status code is returned. */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - }; - /** Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. 
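`issues/check-user-can-be-assigned` above signals its answer purely through the status code (204 vs 404), which with Octokit means catching the rejection; a sketch:

```typescript
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function canBeAssigned(owner: string, repo: string, assignee: string): Promise<boolean> {
  try {
    // issues/check-user-can-be-assigned: 204 with no content when assignable.
    await octokit.rest.issues.checkUserCanBeAssigned({ owner, repo, assignee });
    return true;
  } catch (error) {
    // Octokit rejects on non-2xx responses; a 404 here means "not assignable".
    if ((error as { status?: number }).status === 404) return false;
    throw error;
  }
}
```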
For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)". */ - "repos/enable-automated-security-fixes": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://help.github.com/en/articles/configuring-automated-security-fixes)". */ - "repos/disable-automated-security-fixes": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "repos/list-branches": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Setting to `true` returns only protected branches. When set to `false`, only unprotected branches are returned. Omitting this parameter returns all branches. */ - protected?: boolean; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["short-branch"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "repos/get-branch": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["branch-with-protection"]; - }; - }; - 301: components["responses"]["moved_permanently"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/get-branch-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["branch-protection"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Protecting a branch requires admin or owner permissions to the repository. - * - * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. 
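For `repos/list-branches`, the `protected` query flag narrows the listing and `per_page`/`page` drive pagination. A small sketch, assuming the `octokit.paginate` helper bundled with `@octokit/rest` is available:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function listProtectedBranches(owner: string, repo: string) {
  // `protected: true` returns only protected branches; omit the flag
  // to get every branch in the repository.
  const branches = await octokit.paginate(octokit.rest.repos.listBranches, {
    owner,
    repo,
    protected: true,
    per_page: 100, // maximum page size
  });
  return branches.map((b) => b.name);
}
```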
- * - * **Note**: The list of users, apps, and teams in total is limited to 100 items. - */ - "repos/update-branch-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["protected-branch"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** Require status checks to pass before merging. Set to `null` to disable. */ - required_status_checks: { - /** Require branches to be up to date before merging. */ - strict: boolean; - /** The list of status checks to require in order to merge into this branch */ - contexts: string[]; - } | null; - /** Enforce all configured restrictions for administrators. Set to `true` to enforce required status checks for repository administrators. Set to `null` to disable. */ - enforce_admins: boolean | null; - /** Require at least one approving review on a pull request, before merging. Set to `null` to disable. */ - required_pull_request_reviews: { - /** Specify which users and teams can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories. */ - dismissal_restrictions?: { - /** The list of user `login`s with dismissal access */ - users?: string[]; - /** The list of team `slug`s with dismissal access */ - teams?: string[]; - }; - /** Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ - dismiss_stale_reviews?: boolean; - /** Blocks merging pull requests until [code owners](https://help.github.com/articles/about-code-owners/) review them. */ - require_code_owner_reviews?: boolean; - /** Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6. */ - required_approving_review_count?: number; - } | null; - /** Restrict who can push to the protected branch. User, app, and team `restrictions` are only available for organization-owned repositories. Set to `null` to disable. */ - restrictions: { - /** The list of user `login`s with push access */ - users: string[]; - /** The list of team `slug`s with push access */ - teams: string[]; - /** The list of app `slug`s with push access */ - apps?: string[]; - } | null; - /** Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. Set to `true` to enforce a linear commit history. Set to `false` to disable a linear commit Git history. Your repository must allow squash merging or rebase merging before you can enable a linear commit history. Default: `false`. For more information, see "[Requiring a linear commit history](https://help.github.com/github/administering-a-repository/requiring-a-linear-commit-history)" in the GitHub Help documentation. */ - required_linear_history?: boolean; - /** Permits force pushes to the protected branch by anyone with write access to the repository. Set to `true` to allow force pushes. Set to `false` or `null` to block force pushes. Default: `false`. 
For more information, see "[Enabling force pushes to a protected branch](https://help.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation." */ - allow_force_pushes?: boolean | null; - /** Allows deletion of the protected branch by anyone with write access to the repository. Set to `false` to prevent deletion of the protected branch. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://help.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation. */ - allow_deletions?: boolean; - }; - }; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/delete-branch-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/get-admin-branch-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["protected-branch-admin-enforced"]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - */ - "repos/set-admin-branch-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["protected-branch-admin-enforced"]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
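`repos/update-branch-protection` takes the four top-level settings as required (each nullable to disable it) plus the optional history/force-push/deletion switches shown above. A hedged end-to-end sketch; the check contexts and review count are placeholders:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function protectBranch(owner: string, repo: string, branch: string) {
  await octokit.rest.repos.updateBranchProtection({
    owner,
    repo,
    branch,
    // Required status checks; passing `null` would disable them entirely.
    required_status_checks: {
      strict: true,                 // branch must be up to date before merging
      contexts: ["build", "test"],  // illustrative check names
    },
    enforce_admins: true,           // apply the rules to administrators as well
    required_pull_request_reviews: {
      dismiss_stale_reviews: true,
      require_code_owner_reviews: true,
      required_approving_review_count: 2, // must be between 1 and 6
    },
    // Push restrictions only apply to organization-owned repositories;
    // `null` leaves push access unrestricted.
    restrictions: null,
    required_linear_history: true,
    allow_force_pushes: false,
    allow_deletions: false,
  });
}
```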
- * - * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - */ - "repos/delete-admin-branch-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/get-pull-request-review-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["protected-branch-pull-request-review"]; - }; - }; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/delete-pull-request-review-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. - * - * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. - */ - "repos/update-pull-request-review-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["protected-branch-pull-request-review"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Specify which users and teams can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories. 
*/ - dismissal_restrictions?: { - /** The list of user `login`s with dismissal access */ - users?: string[]; - /** The list of team `slug`s with dismissal access */ - teams?: string[]; - }; - /** Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ - dismiss_stale_reviews?: boolean; - /** Blocks merging pull requests until [code owners](https://help.github.com/articles/about-code-owners/) have reviewed. */ - require_code_owner_reviews?: boolean; - /** Specifies the number of reviewers required to approve pull requests. Use a number between 1 and 6. */ - required_approving_review_count?: number; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://help.github.com/articles/signing-commits-with-gpg) in GitHub Help. - * - * **Note**: You must enable branch protection to require signed commits. - */ - "repos/get-commit-signature-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["protected-branch-admin-enforced"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. - */ - "repos/create-commit-signature-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["protected-branch-admin-enforced"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
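The `repos/update-pull-request-review-protection` body above is optional field by field, and `repos/create-commit-signature-protection` needs no body at all. A sketch combining the two, with placeholder logins and team slugs:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function tightenReviewRules(owner: string, repo: string, branch: string) {
  await octokit.rest.repos.updatePullRequestReviewProtection({
    owner,
    repo,
    branch,
    // Only meaningful for organization-owned repositories; an empty object
    // would clear existing dismissal restrictions.
    dismissal_restrictions: {
      users: ["release-manager"],   // illustrative login
      teams: ["maintainers"],       // illustrative team slug
    },
    dismiss_stale_reviews: true,
    require_code_owner_reviews: true,
    required_approving_review_count: 1, // between 1 and 6
  });

  // Require signed commits on the same branch (branch protection must already be enabled).
  await octokit.rest.repos.createCommitSignatureProtection({ owner, repo, branch });
}
```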
- * - * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. - */ - "repos/delete-commit-signature-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/get-status-checks-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["status-check-policy"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/remove-status-check-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. - */ - "repos/update-status-check-protection": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["status-check-policy"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Require branches to be up to date before merging. 
*/ - strict?: boolean; - /** The list of status checks to require in order to merge into this branch */ - contexts?: string[]; - }; - }; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/get-all-status-check-contexts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": string[]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/set-status-check-contexts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": string[]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** contexts parameter */ - contexts: string[]; - }; - }; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/add-status-check-contexts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": string[]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** contexts parameter */ - contexts: string[]; - }; - }; - }; - }; - /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "repos/remove-status-check-contexts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. 
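Required status checks can be updated as a policy (`strict` plus `contexts`) or edited incrementally through the contexts sub-endpoints, each of which returns the resulting list of context names. A sketch under the same client assumptions; the check names are placeholders:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function requireChecks(owner: string, repo: string, branch: string) {
  // Update the policy itself; both fields are optional on this endpoint.
  await octokit.rest.repos.updateStatusCheckProtection({
    owner,
    repo,
    branch,
    strict: true,          // require the branch to be up to date before merging
    contexts: ["build"],   // illustrative check name
  });

  // Contexts can also be managed incrementally: set replaces, add appends,
  // remove deletes; each call returns the resulting list of context names.
  const { data: contexts } = await octokit.rest.repos.addStatusCheckContexts({
    owner,
    repo,
    branch,
    contexts: ["test"],
  });
  console.log(contexts); // e.g. ["build", "test"]
}
```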
*/ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": string[]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** contexts parameter */ - contexts: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists who has access to this protected branch. - * - * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. - */ - "repos/get-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["branch-restriction-policy"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Disables the ability to restrict who can push to this branch. - */ - "repos/delete-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - */ - "repos/get-apps-with-access-to-protected-branch": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["integration"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
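`repos/get-access-restrictions` returns the combined users/teams/apps policy for an organization-owned repository, and `repos/delete-access-restrictions` drops the restriction entirely. A brief sketch reading the policy shape referenced above:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function inspectRestrictions(owner: string, repo: string, branch: string) {
  // Lists the users, teams, and apps with push access to the protected branch.
  const { data } = await octokit.rest.repos.getAccessRestrictions({ owner, repo, branch });
  console.log(
    data.users.map((u) => u.login),
    data.teams.map((t) => t.slug),
    data.apps.map((a) => a.slug),
  );

  // Removing the restrictions re-opens the branch to everyone with push access.
  await octokit.rest.repos.deleteAccessRestrictions({ owner, repo, branch });
}
```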
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/set-app-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["integration"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** apps parameter */ - apps: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. - * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/add-app-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["integration"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** apps parameter */ - apps: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. 
- * - * | Type | Description | - * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/remove-app-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["integration"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** apps parameter */ - apps: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the teams who have push access to this branch. The list includes child teams. - */ - "repos/get-teams-with-access-to-protected-branch": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. - * - * | Type | Description | - * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | - * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/set-team-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. 
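The app restriction endpoints follow a set/add/remove pattern: `set` replaces the whole list, `add` appends, `remove` deletes, always addressing apps by slug and subject to the 100-item cap noted above. A sketch with a placeholder app slug:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function manageAppAccess(owner: string, repo: string, branch: string) {
  // Replace the list of apps with push access; apps are addressed by slug,
  // and the combined users/teams/apps list is limited to 100 items.
  await octokit.rest.repos.setAppAccessRestrictions({
    owner,
    repo,
    branch,
    apps: ["my-ci-app"],   // illustrative app slug
  });

  // Later, revoke push access for the same app.
  await octokit.rest.repos.removeAppAccessRestrictions({
    owner,
    repo,
    branch,
    apps: ["my-ci-app"],
  });
}
```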
*/ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** teams parameter */ - teams: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified teams push access for this branch. You can also give push access to child teams. - * - * | Type | Description | - * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | - * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/add-team-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** teams parameter */ - teams: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of a team to push to this branch. You can also remove push access for child teams. - * - * | Type | Description | - * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/remove-team-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** teams parameter */ - teams: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
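Team restrictions work the same way but are addressed by team slug, and access granted this way also flows to child teams. A minimal sketch with a placeholder slug:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function grantTeamPush(owner: string, repo: string, branch: string) {
  // `add` appends to the existing list; the response is the updated set of teams.
  const { data: teams } = await octokit.rest.repos.addTeamAccessRestrictions({
    owner,
    repo,
    branch,
    teams: ["backend"],   // illustrative team slug
  });
  console.log(teams.map((t) => t.slug));
}
```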
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists the people who have push access to this branch. - */ - "repos/get-users-with-access-to-protected-branch": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. - * - * | Type | Description | - * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/set-user-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** users parameter */ - users: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Grants the specified people push access for this branch. - * - * | Type | Description | - * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/add-user-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. 
*/ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** users parameter */ - users: string[]; - }; - }; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Removes the ability of a user to push to this branch. - * - * | Type | Description | - * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | - * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | - */ - "repos/remove-user-access-restrictions": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** users parameter */ - users: string[]; - }; - }; - }; - }; - /** - * Renames a branch in a repository. - * - * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". - * - * The permissions required to use this endpoint depends on whether you are renaming the default branch. - * - * To rename a non-default branch: - * - * * Users must have push access. - * * GitHub Apps must have the `contents:write` repository permission. - * - * To rename the default branch: - * - * * Users must have admin or owner permissions. - * * GitHub Apps must have the `administration:write` repository permission. - */ - "repos/rename-branch": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the branch. */ - branch: components["parameters"]["branch"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["branch-with-protection"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The new name of the branch. */ - new_name: string; - }; - }; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Creates a new check run for a specific commit in a repository. 
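User restrictions round out the pattern with plain logins, and `repos/rename-branch` returns the renamed branch immediately even though the rename may still be completing in the background. A sketch with placeholder names:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function handOverBranch(owner: string, repo: string, branch: string) {
  // Grant one more user push access to the protected branch.
  await octokit.rest.repos.addUserAccessRestrictions({
    owner,
    repo,
    branch,
    users: ["octocat"],   // illustrative login
  });

  // Renaming answers with 201 and the renamed branch; the rename itself
  // may still be finishing asynchronously on GitHub's side.
  const { data: renamed } = await octokit.rest.repos.renameBranch({
    owner,
    repo,
    branch,
    new_name: "main",
  });
  console.log(renamed.name);
}
```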
Your GitHub App must have the `checks:write` permission to create check runs. - * - * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. - */ - "checks/create": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["check-run"]; - }; - }; - }; - requestBody: { - content: { - "application/json": (({ - status: "completed"; - } & { - [key: string]: any; - }) | ({ - status?: "queued" | "in_progress"; - } & { - [key: string]: any; - })) & { - /** The name of the check. For example, "code-coverage". */ - name: string; - /** The SHA of the commit. */ - head_sha: string; - /** The URL of the integrator's site that has the full details of the check. If the integrator does not provide this, then the homepage of the GitHub app is used. */ - details_url?: string; - /** A reference for the run on the integrator's system. */ - external_id?: string; - /** The current status. Can be one of `queued`, `in_progress`, or `completed`. */ - status?: "queued" | "in_progress" | "completed"; - /** The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - started_at?: string; - /** - * **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. Can be one of `action_required`, `cancelled`, `failure`, `neutral`, `success`, `skipped`, `stale`, or `timed_out`. When the conclusion is `action_required`, additional details should be provided on the site specified by `details_url`. - * **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. You cannot change a check run conclusion to `stale`, only GitHub can set this. - */ - conclusion?: "action_required" | "cancelled" | "failure" | "neutral" | "success" | "skipped" | "stale" | "timed_out"; - /** The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - completed_at?: string; - /** Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object) description. */ - output?: { - /** The title of the check run. */ - title: string; - /** The summary of the check run. This parameter supports Markdown. */ - summary: string; - /** The details of the check run. This parameter supports Markdown. */ - text?: string; - /** Adds information from your analysis to specific lines of code. Annotations are visible on GitHub in the **Checks** and **Files changed** tab of the pull request. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about how you can view annotations on GitHub, see "[About status checks](https://help.github.com/articles/about-status-checks#checks)". 
See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object) description for details about how to use this parameter. */ - annotations?: { - /** The path of the file to add an annotation to. For example, `assets/css/main.css`. */ - path: string; - /** The start line of the annotation. */ - start_line: number; - /** The end line of the annotation. */ - end_line: number; - /** The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ - start_column?: number; - /** The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ - end_column?: number; - /** The level of the annotation. Can be one of `notice`, `warning`, or `failure`. */ - annotation_level: "notice" | "warning" | "failure"; - /** A short description of the feedback for these lines of code. The maximum size is 64 KB. */ - message: string; - /** The title that represents the annotation. The maximum size is 255 characters. */ - title?: string; - /** Details about this annotation. The maximum size is 64 KB. */ - raw_details?: string; - }[]; - /** Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#images-object) description for details. */ - images?: { - /** The alternative text for the image. */ - alt: string; - /** The full URL of the image. */ - image_url: string; - /** A short image description. */ - caption?: string; - }[]; - }; - /** Displays a button on GitHub that can be clicked to alert your app to do additional tasks. For example, a code linting app can display a button that automatically fixes detected errors. The button created in this object is displayed after the check run completes. When a user clicks the button, GitHub sends the [`check_run.requested_action` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) to your app. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." */ - actions?: { - /** The text to be displayed on a button in the web UI. The maximum size is 20 characters. */ - label: string; - /** A short explanation of what this action would do. The maximum size is 40 characters. */ - description: string; - /** A reference for the action on the integrator's system. The maximum size is 20 characters. */ - identifier: string; - }[]; - }; - }; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. 
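`checks/create` requires `name` and `head_sha`; supplying a `conclusion` implies `status: "completed"`, and at most 50 annotations may be attached per request. A hedged sketch of reporting a single finding; the check name, file path, and message are placeholders:

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function reportLintResults(owner: string, repo: string, headSha: string) {
  // Providing `conclusion` sets `status` to "completed"; larger annotation
  // sets have to be appended later via "update a check run".
  const { data: run } = await octokit.rest.checks.create({
    owner,
    repo,
    name: "example-lint",            // illustrative check name
    head_sha: headSha,
    status: "completed",
    conclusion: "failure",
    completed_at: new Date().toISOString(),
    output: {
      title: "Lint results",
      summary: "1 problem found",    // supports Markdown
      annotations: [
        {
          path: "src/index.ts",      // illustrative file path
          start_line: 10,
          end_line: 10,
          annotation_level: "failure",
          message: "Unexpected console.log",
        },
      ],
    },
  });
  return run.id;
}
```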
OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. - */ - "checks/get": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** check_run_id parameter */ - check_run_id: components["parameters"]["check_run_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["check-run"]; - }; - }; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. - */ - "checks/update": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** check_run_id parameter */ - check_run_id: components["parameters"]["check_run_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["check-run"]; - }; - }; - }; - requestBody: { - content: { - "application/json": (Partial<{ - status?: "completed"; - } & { - [key: string]: any; - }> & Partial<{ - status?: "queued" | "in_progress"; - } & { - [key: string]: any; - }>) & { - /** The name of the check. For example, "code-coverage". */ - name?: string; - /** The URL of the integrator's site that has the full details of the check. */ - details_url?: string; - /** A reference for the run on the integrator's system. */ - external_id?: string; - /** This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - started_at?: string; - /** The current status. Can be one of `queued`, `in_progress`, or `completed`. */ - status?: "queued" | "in_progress" | "completed"; - /** - * **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. Can be one of `action_required`, `cancelled`, `failure`, `neutral`, `success`, `skipped`, `stale`, or `timed_out`. - * **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. You cannot change a check run conclusion to `stale`, only GitHub can set this. - */ - conclusion?: "action_required" | "cancelled" | "failure" | "neutral" | "success" | "skipped" | "stale" | "timed_out"; - /** The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - completed_at?: string; - /** Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object-1) description. */ - output?: { - /** **Required**. */ - title?: string; - /** Can contain Markdown. */ - summary: string; - /** Can contain Markdown. */ - text?: string; - /** Adds information from your analysis to specific lines of code. Annotations are visible in GitHub's pull request UI. Annotations are visible in GitHub's pull request UI. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. 
Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about annotations in the UI, see "[About status checks](https://help.github.com/articles/about-status-checks#checks)". See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object-1) description for details. */ - annotations?: { - /** The path of the file to add an annotation to. For example, `assets/css/main.css`. */ - path: string; - /** The start line of the annotation. */ - start_line: number; - /** The end line of the annotation. */ - end_line: number; - /** The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ - start_column?: number; - /** The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ - end_column?: number; - /** The level of the annotation. Can be one of `notice`, `warning`, or `failure`. */ - annotation_level: "notice" | "warning" | "failure"; - /** A short description of the feedback for these lines of code. The maximum size is 64 KB. */ - message: string; - /** The title that represents the annotation. The maximum size is 255 characters. */ - title?: string; - /** Details about this annotation. The maximum size is 64 KB. */ - raw_details?: string; - }[]; - /** Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#annotations-object-1) description for details. */ - images?: { - /** The alternative text for the image. */ - alt: string; - /** The full URL of the image. */ - image_url: string; - /** A short image description. */ - caption?: string; - }[]; - }; - /** Possible further actions the integrator can perform, which a user may trigger. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." */ - actions?: { - /** The text to be displayed on a button in the web UI. The maximum size is 20 characters. */ - label: string; - /** A short explanation of what this action would do. The maximum size is 40 characters. */ - description: string; - /** A reference for the action on the integrator's system. The maximum size is 20 characters. */ - identifier: string; - }[]; - }; - }; - }; - }; - /** Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. */ - "checks/list-annotations": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** check_run_id parameter */ - check_run_id: components["parameters"]["check_run_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
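Because `checks/update` appends annotations on every call, a result set larger than 50 can be pushed in chunks against the same check run, as sketched below (the output title and summary are placeholders):

```ts
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Annotations accumulate across updates, so a large set is sent in
// batches of 50 against the same check run.
async function appendAnnotations(
  owner: string,
  repo: string,
  checkRunId: number,
  annotations: Array<{
    path: string;
    start_line: number;
    end_line: number;
    annotation_level: "notice" | "warning" | "failure";
    message: string;
  }>,
) {
  for (let i = 0; i < annotations.length; i += 50) {
    await octokit.rest.checks.update({
      owner,
      repo,
      check_run_id: checkRunId,
      output: {
        title: "Lint results",                            // illustrative
        summary: `${annotations.length} problems found`,  // `summary` is required on `output`
        annotations: annotations.slice(i, i + 50),        // at most 50 per request
      },
    });
  }
}
```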
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["check-annotation"][]; - }; - }; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. - */ - "checks/create-suite": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** when the suite already existed */ - 200: { - content: { - "application/json": components["schemas"]["check-suite"]; - }; - }; - /** Response when the suite was created */ - 201: { - content: { - "application/json": components["schemas"]["check-suite"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The sha of the head commit. */ - head_sha: string; - }; - }; - }; - }; - /** Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. */ - "checks/set-suites-preferences": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["check-suite-preference"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Enables or disables automatic creation of CheckSuite events upon pushes to the repository. Enabled by default. See the [`auto_trigger_checks` object](https://docs.github.com/rest/reference/checks#auto_trigger_checks-object) description for details. */ - auto_trigger_checks?: { - /** The `id` of the GitHub App. */ - app_id: number; - /** Set to `true` to enable automatic creation of CheckSuite events upon pushes to the repository, or `false` to disable them. */ - setting: boolean; - }[]; - }; - }; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. 
- */ - "checks/get-suite": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** check_suite_id parameter */ - check_suite_id: components["parameters"]["check_suite_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["check-suite"]; - }; - }; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. - */ - "checks/list-for-suite": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** check_suite_id parameter */ - check_suite_id: components["parameters"]["check_suite_id"]; - }; - query: { - /** Returns check runs with the specified `name`. */ - check_name?: components["parameters"]["check_name"]; - /** Returns check runs with the specified `status`. Can be one of `queued`, `in_progress`, or `completed`. */ - status?: components["parameters"]["status"]; - /** Filters check runs by their `completed_at` timestamp. Can be one of `latest` (returning the most recent check runs) or `all`. */ - filter?: "latest" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - check_runs: components["schemas"]["check-run"][]; - }; - }; - }; - }; - }; - /** - * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. - * - * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. - */ - "checks/rerequest-suite": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** check_suite_id parameter */ - check_suite_id: components["parameters"]["check_suite_id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": { - [key: string]: any; - }; - }; - }; - }; - }; - /** - * Lists all open code scanning alerts for the default branch (usually `main` - * or `master`). You must use an access token with the `security_events` scope to use - * this endpoint. GitHub Apps must have the `security_events` read permission to use - * this endpoint. - * - * The response includes a `most_recent_instance` object. - * This provides details of the most recent instance of this alert - * for the default branch or for the specified Git reference - * (if you used `ref` in the request). 
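As an illustration of the code scanning alert operations that follow (`code-scanning/list-alerts-for-repo` and `code-scanning/update-alert`), here is a minimal sketch, assuming `@octokit/core`, a `security_events`-scoped token in `GITHUB_TOKEN`, and a purely hypothetical rule id as the dismissal criterion:

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function dismissNoisyAlerts(owner: string, repo: string): Promise<void> {
  // List open alerts for the default branch (add `page` to walk further pages).
  const alerts = await octokit.request("GET /repos/{owner}/{repo}/code-scanning/alerts", {
    owner,
    repo,
    state: "open",
    per_page: 100,
  });

  for (const alert of alerts.data) {
    // Hypothetical selection criterion; dismissing requires write access to security events.
    if (alert.rule.id === "js/unused-local-variable") {
      await octokit.request("PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {
        owner,
        repo,
        alert_number: alert.number,
        state: "dismissed",
        dismissed_reason: "false positive",
      });
    }
  }
}
```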
- */ - "code-scanning/list-alerts-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ - tool_name?: components["parameters"]["tool_name"]; - /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ - tool_guid?: components["parameters"]["tool_guid"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ - ref?: components["parameters"]["git_ref"]; - /** Set to `open`, `fixed`, or `dismissed` to list code scanning alerts in a specific state. */ - state?: components["schemas"]["code-scanning-alert-state"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-scanning-alert-items"][]; - }; - }; - 403: components["responses"]["code_scanning_forbidden_read"]; - 404: components["responses"]["not_found"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * **Deprecation notice**: - * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. - */ - "code-scanning/get-alert": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ - alert_number: components["parameters"]["alert_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-scanning-alert"]; - }; - }; - 403: components["responses"]["code_scanning_forbidden_read"]; - 404: components["responses"]["not_found"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. */ - "code-scanning/update-alert": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. 
*/ - alert_number: components["parameters"]["alert_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-scanning-alert"]; - }; - }; - 403: components["responses"]["code_scanning_forbidden_write"]; - 404: components["responses"]["not_found"]; - 503: components["responses"]["service_unavailable"]; - }; - requestBody: { - content: { - "application/json": { - state: components["schemas"]["code-scanning-alert-set-state"]; - dismissed_reason?: components["schemas"]["code-scanning-alert-dismissed-reason"]; - }; - }; - }; - }; - /** Lists all instances of the specified code scanning alert. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. */ - "code-scanning/list-alerts-instances": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ - alert_number: components["parameters"]["alert_number"]; - }; - query: { - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ - ref?: components["parameters"]["git_ref"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-scanning-alert-instance"][]; - }; - }; - 403: components["responses"]["code_scanning_forbidden_read"]; - 404: components["responses"]["not_found"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Lists the details of all code scanning analyses for a repository, - * starting with the most recent. - * The response is paginated and you can use the `page` and `per_page` parameters - * to list the analyses you're interested in. - * By default 30 analyses are listed per page. - * - * The `rules_count` field in the response gives the number of rules - * that were run in the analysis. - * For very old analyses this data is not available, - * and `0` is returned in this field. - * - * You must use an access token with the `security_events` scope to use this endpoint. - * GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * **Deprecation notice**: - * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. - */ - "code-scanning/list-recent-analyses": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ - tool_name?: components["parameters"]["tool_name"]; - /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data.
You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ - tool_guid?: components["parameters"]["tool_guid"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** The Git reference for the analyses you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ - ref?: components["schemas"]["code-scanning-ref"]; - /** Filter analyses belonging to the same SARIF upload. */ - sarif_id?: components["schemas"]["code-scanning-analysis-sarif-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-scanning-analysis"][]; - }; - }; - 403: components["responses"]["code_scanning_forbidden_read"]; - 404: components["responses"]["not_found"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Gets a specified code scanning analysis for a repository. - * You must use an access token with the `security_events` scope to use this endpoint. - * GitHub Apps must have the `security_events` read permission to use this endpoint. - * - * The default JSON response contains fields that describe the analysis. - * This includes the Git reference and commit SHA to which the analysis relates, - * the datetime of the analysis, the name of the code scanning tool, - * and the number of alerts. - * - * The `rules_count` field in the default response gives the number of rules - * that were run in the analysis. - * For very old analyses this data is not available, - * and `0` is returned in this field. - * - * If you use the Accept header `application/sarif+json`, - * the response contains the analysis data that was uploaded. - * This is formatted as - * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). - * For an example response, see "[Custom media type for code scanning](#custom-media-type-for-code-scanning)." - * - * **Deprecation notice**: - * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. - */ - "code-scanning/get-analysis": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. */ - analysis_id: number; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json+sarif": string; - "application/json": components["schemas"]["code-scanning-analysis"]; - }; - }; - 403: components["responses"]["code_scanning_forbidden_read"]; - 404: components["responses"]["not_found"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Deletes a specified code scanning analysis from a repository. For - * private repositories, you must use an access token with the `repo` scope. For public repositories, - * you must use an access token with `public_repo` and `repo:security_events` scopes. - * GitHub Apps must have the `security_events` write permission to use this endpoint. - * - * You can delete one analysis at a time. - * To delete a series of analyses, start with the most recent analysis and work backwards. - * Conceptually, the process is similar to the undo function in a text editor.
- * - * When you list the analyses for a repository, - * one or more will be identified as deletable in the response: - * - * ``` - * "deletable": true - * ``` - * - * An analysis is deletable when it's the most recent in a set of analyses. - * Typically, a repository will have multiple sets of analyses - * for each enabled code scanning tool, - * where a set is determined by a unique combination of analysis values: - * - * * `ref` - * * `tool` - * * `analysis_key` - * * `environment` - * - * If you attempt to delete an analysis that is not the most recent in a set, - * you'll get a 400 response with the message: - * - * ``` - * Analysis specified is not deletable. - * ``` - * - * The response from a successful `DELETE` operation provides you with - * two alternative URLs for deleting the next analysis in the set - * (see the example default response below). - * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis - * in the set. This is a useful option if you want to preserve at least one analysis - * for the specified tool in your repository. - * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. - * When you delete the last analysis in a set the value of `next_analysis_url` and `confirm_delete_url` - * in the 200 response is `null`. - * - * As an example of the deletion process, - * let's imagine that you added a workflow that configured a particular code scanning tool - * to analyze the code in a repository. This tool has added 15 analyses: - * 10 on the default branch, and another 5 on a topic branch. - * You therefore have two separate sets of analyses for this tool. - * You've now decided that you want to remove all of the analyses for the tool. - * To do this you must make 15 separate deletion requests. - * To start, you must find the deletable analysis for one of the sets, - * step through deleting the analyses in that set, - * and then repeat the process for the second set. - * The procedure therefore consists of a nested loop: - * - * **Outer loop**: - * * List the analyses for the repository, filtered by tool. - * * Parse this list to find a deletable analysis. If found: - * - * **Inner loop**: - * * Delete the identified analysis. - * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. - * - * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. - */ - "code-scanning/delete-analysis": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. */ - analysis_id: number; - }; - query: { - /** Allow deletion if the specified analysis is the last in a set. If you attempt to delete the final analysis in a set without setting this parameter to `true`, you'll get a 400 response with the message: `Analysis is last of its type and deletion may result in the loss of historical alert data. 
Please specify confirm_delete.` */ - confirm_delete?: string | null; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-scanning-analysis-deletion"]; - }; - }; - 400: components["responses"]["bad_request"]; - 403: components["responses"]["code_scanning_forbidden_write"]; - 404: components["responses"]["not_found"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` write permission to use this endpoint. - * - * There are two places where you can upload code scanning results. - * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/github/finding-security-vulnerabilities-and-errors-in-your-code/triaging-code-scanning-alerts-in-pull-requests)." - * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/github/finding-security-vulnerabilities-and-errors-in-your-code/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." - * - * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: - * - * ``` - * gzip -c analysis-data.sarif | base64 -w0 - * ``` - * - * SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. - * - * The `202 Accepted`, response includes an `id` value. - * You can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint. - * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." - */ - "code-scanning/upload-sarif": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 202: { - content: { - "application/json": components["schemas"]["code-scanning-sarifs-receipt"]; - }; - }; - /** Bad Request if the sarif field is invalid */ - 400: unknown; - 403: components["responses"]["code_scanning_forbidden_write"]; - 404: components["responses"]["not_found"]; - /** Payload Too Large if the sarif field is too large */ - 413: unknown; - 503: components["responses"]["service_unavailable"]; - }; - requestBody: { - content: { - "application/json": { - commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; - ref: components["schemas"]["code-scanning-ref"]; - sarif: components["schemas"]["code-scanning-analysis-sarif-file"]; - /** - * The base directory used in the analysis, as it appears in the SARIF file. 
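The nested deletion loop described for `code-scanning/delete-analysis` above can be sketched as follows, assuming `@octokit/core` and a suitably scoped token in `GITHUB_TOKEN`. It deliberately follows `confirm_delete_url`, so it removes every analysis for the tool, including the final one in each set:

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function deleteAllAnalyses(owner: string, repo: string, toolName: string): Promise<void> {
  // Outer loop: keep looking for a deletable analysis for this tool.
  for (;;) {
    const analyses = await octokit.request("GET /repos/{owner}/{repo}/code-scanning/analyses", {
      owner,
      repo,
      tool_name: toolName,
      per_page: 100,
    });
    const deletable = analyses.data.find((a) => a.deletable);
    if (!deletable) return;

    // Inner loop: delete it, then follow `confirm_delete_url` until the set is exhausted.
    let res = await octokit.request(
      `DELETE /repos/${owner}/${repo}/code-scanning/analyses/${deletable.id}?confirm_delete=true`
    );
    while (res.data.confirm_delete_url) {
      res = await octokit.request(`DELETE ${res.data.confirm_delete_url}`);
    }
  }
}
```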
- * This property is used to convert file paths from absolute to relative, so that alerts can be mapped to their correct location in the repository. - */ - checkout_uri?: string; - /** The time that the analysis run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - started_at?: string; - /** The name of the tool used to generate the code scanning analysis. If this parameter is not used, the tool name defaults to "API". If the uploaded SARIF contains a tool GUID, this will be available for filtering using the `tool_guid` parameter of operations such as `GET /repos/{owner}/{repo}/code-scanning/alerts`. */ - tool_name?: string; - }; - }; - }; - }; - /** Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint. GitHub Apps must have the `security_events` read permission to use this endpoint. */ - "code-scanning/get-sarif": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The SARIF ID obtained after uploading. */ - sarif_id: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-scanning-sarifs-status"]; - }; - }; - 403: components["responses"]["code_scanning_forbidden_read"]; - /** Not Found if the sarif id does not match any upload */ - 404: unknown; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. - * - * Team members will include the members of child teams. - */ - "repos/list-collaborators": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** - * Filter collaborators returned by their affiliation. Can be one of: - * \* `outside`: All outside collaborators of an organization-owned repository. - * \* `direct`: All collaborators with permissions to an organization-owned repository, regardless of organization membership status. - * \* `all`: All collaborators the authenticated user can see. - */ - affiliation?: "outside" | "direct" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["collaborator"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. - * - * Team members will include the members of child teams. 
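To complement the `gzip -c analysis-data.sarif | base64 -w0` example in the `code-scanning/upload-sarif` description above, the same preparation can be done in Node. A minimal sketch, assuming `@octokit/core` and a token in `GITHUB_TOKEN`; the file name and refs are placeholders:

```
import { readFileSync } from "fs";
import { gzipSync } from "zlib";
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function uploadSarif(
  owner: string,
  repo: string,
  commitSha: string,
  ref: string
): Promise<string | undefined> {
  // gzip the SARIF file, then base64-encode it, as the endpoint requires.
  const sarif = gzipSync(readFileSync("analysis-data.sarif")).toString("base64");

  const res = await octokit.request("POST /repos/{owner}/{repo}/code-scanning/sarifs", {
    owner,
    repo,
    commit_sha: commitSha,
    ref, // e.g. "refs/heads/main" or "refs/pull/42/merge"
    sarif,
  });

  // The 202 receipt carries an id usable with GET .../code-scanning/sarifs/{sarif_id}.
  return res.data.id;
}
```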
- */ - "repos/check-collaborator": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response if user is a collaborator */ - 204: never; - /** Not Found if user is not a collaborator */ - 404: unknown; - }; - }; - /** - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * For more information the permission levels, see "[Repository permission levels for an organization](https://help.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). - * - * **Rate limits** - * - * To prevent abuse, you are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. - */ - "repos/add-collaborator": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response when a new invitation is created */ - 201: { - content: { - "application/json": components["schemas"]["repository-invitation"]; - }; - }; - /** Response when person is already a collaborator */ - 204: never; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** - * The permission to grant the collaborator. **Only valid on organization-owned repositories.** Can be one of: - * \* `pull` - can pull, but not push to or administer this repository. - * \* `push` - can pull and push, but not administer this repository. - * \* `admin` - can pull, push and administer this repository. - * \* `maintain` - Recommended for project managers who need to manage the repository without access to sensitive or destructive actions. - * \* `triage` - Recommended for contributors who need to proactively manage issues and pull requests without write access. 
- */ - permission?: "pull" | "push" | "admin" | "maintain" | "triage"; - permissions?: string; - }; - }; - }; - }; - "repos/remove-collaborator": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. */ - "repos/get-collaborator-permission-level": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** if user has admin permissions */ - 200: { - content: { - "application/json": components["schemas"]["repository-collaborator-permission"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). - * - * Comments are ordered by ascending ID. - */ - "repos/list-commit-comments-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["commit-comment"][]; - }; - }; - }; - }; - "repos/get-commit-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["commit-comment"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "repos/delete-commit-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - "repos/update-commit-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["commit-comment"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - requestBody: { - content: { - "application/json": { - /** The contents of the comment */ - body: string; - }; - }; - }; - }; - /** List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). */ - "reactions/list-for-commit-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). 
Omit this parameter to list all reactions to a commit comment. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this commit comment. */ - "reactions/create-for-commit-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Reaction exists */ - 200: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - /** Reaction created */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the commit comment. */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. - * - * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). - */ - "reactions/delete-for-commit-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - reaction_id: components["parameters"]["reaction-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. 
| - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - "repos/list-commits": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** SHA or branch to start listing commits from. Default: the repository’s default branch (usually `master`). */ - sha?: string; - /** Only commits containing this file path will be returned. */ - path?: string; - /** GitHub login or email address by which to filter by commit author. */ - author?: string; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Only commits before this date will be returned. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - until?: string; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["commit"][]; - }; - }; - 400: components["responses"]["bad_request"]; - 404: components["responses"]["not_found"]; - 409: components["responses"]["conflict"]; - 500: components["responses"]["internal_error"]; - }; - }; - /** - * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. - */ - "repos/list-branches-for-head-commit": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** commit_sha parameter */ - commit_sha: components["parameters"]["commit_sha"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["branch-short"][]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Use the `:commit_sha` to specify the commit that will have its comments listed. 
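The filter parameters on `repos/list-commits` above combine naturally with pagination. A minimal sketch, assuming `@octokit/core` and a token in `GITHUB_TOKEN`; the path, author, and date are placeholders:

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Collects commits that touch one path, by one author, since a given date,
// walking `page` until a short page signals the end of the results.
async function commitsTouching(owner: string, repo: string) {
  const commits: any[] = [];
  for (let page = 1; ; page++) {
    const res = await octokit.request("GET /repos/{owner}/{repo}/commits", {
      owner,
      repo,
      path: "src/index.ts",
      author: "octocat",
      since: "2021-01-01T00:00:00Z",
      per_page: 100,
      page,
    });
    commits.push(...res.data);
    if (res.data.length < 100) return commits;
  }
}
```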
*/ - "repos/list-comments-for-commit": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** commit_sha parameter */ - commit_sha: components["parameters"]["commit_sha"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["commit-comment"][]; - }; - }; - }; - }; - /** - * Create a comment for a commit using its `:commit_sha`. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - "repos/create-commit-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** commit_sha parameter */ - commit_sha: components["parameters"]["commit_sha"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["commit-comment"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The contents of the comment. */ - body: string; - /** Relative path of the file to comment on. */ - path?: string; - /** Line index in the diff to comment on. */ - position?: number; - /** **Deprecated**. Use **position** parameter instead. Line number in the file to comment on. */ - line?: number; - }; - }; - }; - }; - /** Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. Additional preview headers may be required to see certain details for associated pull requests, such as whether a pull request is in a draft state. For more information about previews that might affect this endpoint, see the [List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests) endpoint. */ - "repos/list-pull-requests-associated-with-commit": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** commit_sha parameter */ - commit_sha: components["parameters"]["commit_sha"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["pull-request-simple"][]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. 
- * - * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. - * - * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. - * - * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - "repos/get-commit": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - query: { - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). 
*/ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["commit"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - 500: components["responses"]["internal_error"]; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. - * - * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. - */ - "checks/list-for-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - query: { - /** Returns check runs with the specified `name`. */ - check_name?: components["parameters"]["check_name"]; - /** Returns check runs with the specified `status`. Can be one of `queued`, `in_progress`, or `completed`. */ - status?: components["parameters"]["status"]; - /** Filters check runs by their `completed_at` timestamp. Can be one of `latest` (returning the most recent check runs) or `all`. */ - filter?: "latest" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - app_id?: number; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - check_runs: components["schemas"]["check-run"][]; - }; - }; - }; - }; - }; - /** - * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. - * - * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. - */ - "checks/list-suites-for-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - query: { - /** Filters check suites by GitHub App `id`. */ - app_id?: number; - /** Returns check runs with the specified `name`. */ - check_name?: components["parameters"]["check_name"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - check_suites: components["schemas"]["check-suite"][]; - }; - }; - }; - }; - }; - /** - * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. - * - * The most recent status for each context is returned, up to 100. 
This field [paginates](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination) if there are over 100 contexts. - * - * Additionally, a combined `state` is returned. The `state` is one of: - * - * * **failure** if any of the contexts report as `error` or `failure` - * * **pending** if there are no statuses or a context is `pending` - * * **success** if the latest status for all contexts is `success` - */ - "repos/get-combined-status-for-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["combined-commit-status"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. - * - * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. - */ - "repos/list-commit-statuses-for-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["status"][]; - }; - }; - 301: components["responses"]["moved_permanently"]; - }; - }; - /** - * Returns the contents of the repository's code of conduct file, if one is detected. - * - * A code of conduct is detected if there is a file named `CODE_OF_CONDUCT` in the root directory of the repository. GitHub detects which code of conduct it is using fuzzy matching. - */ - "codes-of-conduct/get-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["code-of-conduct"]; - }; - }; - }; - }; - /** - * This endpoint will return all community profile metrics, including an - * overall health score, repository description, the presence of documentation, detected - * code of conduct, detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, - * README, and CONTRIBUTING files. - * - * The `health_percentage` score is defined as a percentage of how many of - * these four documents are present: README, CONTRIBUTING, LICENSE, and - * CODE_OF_CONDUCT. For example, if all four documents are present, then - * the `health_percentage` is `100`. If only one is present, then the - * `health_percentage` is `25`. - * - * `content_reports_enabled` is only returned for organization-owned repositories. 
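The combined `state` rules listed for `repos/get-combined-status-for-ref` above reduce to a single comparison in practice. A minimal sketch, assuming `@octokit/core` and a token in `GITHUB_TOKEN`:

```
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// "failure" if any context reported error/failure, "pending" if nothing has
// reported yet, and "success" only when the latest status of every context succeeded.
async function isGreen(owner: string, repo: string, ref: string): Promise<boolean> {
  const res = await octokit.request("GET /repos/{owner}/{repo}/commits/{ref}/status", {
    owner,
    repo,
    ref, // SHA, branch name, or tag name
  });
  return res.data.state === "success";
}
```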
- */ - "repos/get-community-profile-metrics": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["community-profile"]; - }; - }; - }; - }; - /** - * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit - * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. - * - * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for - * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media - * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent - * object format. - * - * **Note**: - * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). - * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees - * API](https://docs.github.com/rest/reference/git#get-a-tree). - * * This API supports files up to 1 megabyte in size. - * - * #### If the content is a directory - * The response will be an array of objects, one object for each item in the directory. - * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value - * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). - * In the next major version of the API, the type will be returned as "submodule". - * - * #### If the content is a symlink - * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the - * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object - * describing the symlink itself. - * - * #### If the content is a submodule - * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific - * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out - * the submodule at that specific commit. - * - * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the - * github.com URLs (`html_url` and `_links["html"]`) will have null values. - */ - "repos/get-content": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** path parameter */ - path: string; - }; - query: { - /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ - ref?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/vnd.github.v3.object": components["schemas"]["content-tree"]; - "application/json": components["schemas"]["content-directory"] | components["schemas"]["content-file"] | components["schemas"]["content-symlink"] | components["schemas"]["content-submodule"]; - }; - }; - 302: components["responses"]["found"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Creates a new file or replaces an existing file in a repository. 
*/ - "repos/create-or-update-file-contents": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** path parameter */ - path: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["file-commit"]; - }; - }; - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["file-commit"]; - }; - }; - 404: components["responses"]["not_found"]; - 409: components["responses"]["conflict"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The commit message. */ - message: string; - /** The new file content, using Base64 encoding. */ - content: string; - /** **Required if you are updating a file**. The blob SHA of the file being replaced. */ - sha?: string; - /** The branch name. Default: the repository’s default branch (usually `master`) */ - branch?: string; - /** The person that committed the file. Default: the authenticated user. */ - committer?: { - /** The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ - name: string; - /** The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ - email: string; - date?: string; - }; - /** The author of the file. Default: The `committer` or the authenticated user if you omit `committer`. */ - author?: { - /** The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ - name: string; - /** The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ - email: string; - date?: string; - }; - }; - }; - }; - }; - /** - * Deletes a file in a repository. - * - * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. - * - * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. - * - * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. - */ - "repos/delete-file": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** path parameter */ - path: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["file-commit"]; - }; - }; - 404: components["responses"]["not_found"]; - 409: components["responses"]["conflict"]; - 422: components["responses"]["validation_failed"]; - 503: components["responses"]["service_unavailable"]; - }; - requestBody: { - content: { - "application/json": { - /** The commit message. */ - message: string; - /** The blob SHA of the file being replaced. */ - sha: string; - /** The branch name. Default: the repository’s default branch (usually `master`) */ - branch?: string; - /** object containing information about the committer. */ - committer?: { - /** The name of the author (or committer) of the commit */ - name?: string; - /** The email of the author (or committer) of the commit */ - email?: string; - }; - /** object containing information about the author. 
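For the `repos/create-or-update-file-contents` operation above, a minimal sketch that creates a file with Base64-encoded content; replacing an existing file would additionally require the current blob `sha`. All identifiers are placeholders.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  // The API expects the new file body as Base64; `sha` is only needed when
  // replacing an existing file (omit it when creating a new one).
  const { data } = await octokit.request(
    "PUT /repos/{owner}/{repo}/contents/{path}",
    {
      owner: "octocat",
      repo: "hello-world",
      path: "docs/example.txt",
      message: "docs: add example file",
      content: Buffer.from("hello from the API\n").toString("base64"),
      branch: "main",
    }
  );
  console.log(`commit: ${data.commit.sha}`);
})().catch(console.error);
```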
*/ - author?: { - /** The name of the author (or committer) of the commit */ - name?: string; - /** The email of the author (or committer) of the commit */ - email?: string; - }; - }; - }; - }; - }; - /** - * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. - * - * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. - */ - "repos/list-contributors": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Set to `1` or `true` to include anonymous contributors in results. */ - anon?: string; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** if repository contains content */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["contributor"][]; - }; - }; - /** Response if repository is empty */ - 204: never; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Simple filtering of deployments is available via query parameters: */ - "repos/list-deployments": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** The SHA recorded at creation time. */ - sha?: string; - /** The name of the ref. This can be a branch, tag, or SHA. */ - ref?: string; - /** The name of the task for the deployment (e.g., `deploy` or `deploy:migrations`). */ - task?: string; - /** The name of the environment that was deployed to (e.g., `staging` or `production`). */ - environment?: string | null; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["deployment"][]; - }; - }; - }; - }; - /** - * Deployments offer a few configurable parameters with certain defaults. - * - * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them - * before we merge a pull request. - * - * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have - * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter - * makes it easier to track which environments have requested deployments. The default environment is `production`. - * - * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If - * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, - * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will - * return a failure response. 
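A small sketch of the `repos/list-contributors` call described above, including the `anon` query parameter; note that anonymous entries carry only an email/name pair. The client setup and repository names are assumptions.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const { data } = await octokit.request(
    "GET /repos/{owner}/{repo}/contributors",
    { owner: "octocat", repo: "hello-world", anon: "true", per_page: 100 }
  );
  for (const contributor of data) {
    // Anonymous contributors have no `login`, only an email/name pair.
    console.log(`${contributor.login ?? contributor.email}: ${contributor.contributions}`);
  }
})().catch(console.error);
```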
- * - * By default, [commit statuses](https://docs.github.com/rest/reference/repos#statuses) for every submitted context must be in a `success` - * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to - * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do - * not require any contexts or create any commit statuses, the deployment will always succeed. - * - * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text - * field that will be passed on when a deployment event is dispatched. - * - * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might - * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an - * application with debugging enabled. - * - * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. - * - * #### Merged branch response - * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating - * a deployment. This auto-merge happens when: - * * Auto-merge option is enabled in the repository - * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example - * * There are no merge conflicts - * - * If there are no new commits in the base branch, a new request to create a deployment should give a successful - * response. - * - * #### Merge conflict response - * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't - * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. - * - * #### Failed commit status checks - * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` - * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. - */ - "repos/create-deployment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["deployment"]; - }; - }; - /** Merged branch response */ - 202: { - content: { - "application/json": { - message?: string; - }; - }; - }; - /** Conflict when there is a merge conflict or the commit's status checks failed */ - 409: unknown; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The ref to deploy. This can be a branch, tag, or SHA. */ - ref: string; - /** Specifies a task to execute (e.g., `deploy` or `deploy:migrations`). */ - task?: string; - /** Attempts to automatically merge the default branch into the requested ref, if it's behind the default branch. */ - auto_merge?: boolean; - /** The [status](https://docs.github.com/rest/reference/repos#statuses) contexts to verify against commit status checks. If you omit this parameter, GitHub verifies all unique contexts before creating a deployment. To bypass checking entirely, pass an empty array. Defaults to all unique contexts. */ - required_contexts?: string[]; - payload?: { - [key: string]: any; - } | string; - /** Name for the target deployment environment (e.g., `production`, `staging`, `qa`). 
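A sketch of the `repos/create-deployment` flow explained above, passing an empty `required_contexts` array to bypass status checks and branching on the 201 versus 202 ("merged branch") outcome. The repository, ref, and payload are placeholders.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const response = await octokit.request("POST /repos/{owner}/{repo}/deployments", {
    owner: "octocat",
    repo: "hello-world",
    ref: "topic-branch",
    environment: "staging",
    auto_merge: false,
    // Pass an empty array to skip commit-status checks entirely.
    required_contexts: [],
    payload: { deploy: "migrate" },
    description: "Deploy request from the API",
  });

  if (response.status === 201 && "id" in response.data) {
    console.log(`deployment id: ${response.data.id}`);
  } else {
    // 202 means GitHub merged the base branch into the ref instead of deploying.
    console.log("merged branch response:", response.data);
  }
})().catch(console.error);
```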
*/ - environment?: string; - /** Short description of the deployment. */ - description?: string | null; - /** - * Specifies if the given environment is specific to the deployment and will no longer exist at some point in the future. Default: `false` - * **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. - */ - transient_environment?: boolean; - /** - * Specifies if the given environment is one that end-users directly interact with. Default: `true` when `environment` is `production` and `false` otherwise. - * **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. - */ - production_environment?: boolean; - }; - }; - }; - }; - "repos/get-deployment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** deployment_id parameter */ - deployment_id: components["parameters"]["deployment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["deployment"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * To ensure there can always be an active deployment, you can only delete an _inactive_ deployment. Anyone with `repo` or `repo_deployment` scopes can delete an inactive deployment. - * - * To set a deployment as inactive, you must: - * - * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. - * * Mark the active deployment as inactive by adding any non-successful deployment status. - * - * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." - */ - "repos/delete-deployment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** deployment_id parameter */ - deployment_id: components["parameters"]["deployment_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed_simple"]; - }; - }; - /** Users with pull access can view deployment statuses for a deployment: */ - "repos/list-deployment-statuses": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** deployment_id parameter */ - deployment_id: components["parameters"]["deployment_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["deployment-status"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Users with `push` access can create deployment statuses for a given deployment. 
- * - * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. - */ - "repos/create-deployment-status": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** deployment_id parameter */ - deployment_id: components["parameters"]["deployment_id"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["deployment-status"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The state of the status. Can be one of `error`, `failure`, `inactive`, `in_progress`, `queued` `pending`, or `success`. **Note:** To use the `inactive` state, you must provide the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. To use the `in_progress` and `queued` states, you must provide the [`application/vnd.github.flash-preview+json`](https://docs.github.com/rest/overview/api-previews#deployment-statuses) custom media type. When you set a transient deployment to `inactive`, the deployment will be shown as `destroyed` in GitHub. */ - state: "error" | "failure" | "inactive" | "in_progress" | "queued" | "pending" | "success"; - /** The target URL to associate with this status. This URL should contain output to keep the user updated while the task is running or serve as historical information for what happened in the deployment. **Note:** It's recommended to use the `log_url` parameter, which replaces `target_url`. */ - target_url?: string; - /** - * The full URL of the deployment's output. This parameter replaces `target_url`. We will continue to accept `target_url` to support legacy uses, but we recommend replacing `target_url` with `log_url`. Setting `log_url` will automatically set `target_url` to the same value. Default: `""` - * **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. - */ - log_url?: string; - /** A short description of the status. The maximum description length is 140 characters. */ - description?: string; - /** Name for the target deployment environment, which can be changed when setting a deploy status. For example, `production`, `staging`, or `qa`. **Note:** This parameter requires you to use the [`application/vnd.github.flash-preview+json`](https://docs.github.com/rest/overview/api-previews#deployment-statuses) custom media type. */ - environment?: "production" | "staging" | "qa"; - /** - * Sets the URL for accessing your environment. Default: `""` - * **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. 
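A tentative sketch of `repos/create-deployment-status` using the preview media types the descriptions above call out for the `in_progress` state and `log_url` field; the deployment id and URLs are placeholders, and the preview names are assumptions derived from those notes.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const { data } = await octokit.request(
    "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses",
    {
      owner: "octocat",
      repo: "hello-world",
      deployment_id: 42, // placeholder
      state: "in_progress",
      log_url: "https://ci.example.com/builds/123",
      description: "Deployment started",
      // The `in_progress`/`queued` states and `log_url`/`environment_url`
      // fields are gated behind the preview media types noted above.
      mediaType: { previews: ["flash", "ant-man"] },
    }
  );
  console.log(`status id: ${data.id}, state: ${data.state}`);
})().catch(console.error);
```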
- */ - environment_url?: string; - /** - * Adds a new `inactive` status to all prior non-transient, non-production environment deployments with the same repository and `environment` name as the created status's deployment. An `inactive` status is only added to deployments that had a `success` state. Default: `true` - * **Note:** To add an `inactive` status to `production` environments, you must use the [`application/vnd.github.flash-preview+json`](https://docs.github.com/rest/overview/api-previews#deployment-statuses) custom media type. - * **Note:** This parameter requires you to use the [`application/vnd.github.ant-man-preview+json`](https://docs.github.com/rest/overview/api-previews#enhanced-deployments) custom media type. - */ - auto_inactive?: boolean; - }; - }; - }; - }; - /** Users with pull access can view a deployment status for a deployment: */ - "repos/get-deployment-status": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** deployment_id parameter */ - deployment_id: components["parameters"]["deployment_id"]; - status_id: number; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["deployment-status"]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." - * - * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. - * - * This endpoint requires write access to the repository by providing either: - * - * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. - * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. - * - * This input example shows how you can use the `client_payload` as a test to debug your workflow. - */ - "repos/create-dispatch-event": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** A custom webhook event name. */ - event_type: string; - /** JSON payload with extra information about the webhook event that your action or worklow may use. */ - client_payload?: { - [key: string]: any; - }; - }; - }; - }; - }; - /** - * Get all environments for a repository. - * - * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. 
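The `repos/create-dispatch-event` operation above is easiest to see with a concrete call; the event name and `client_payload` below are invented placeholders.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  // Triggers a `repository_dispatch` webhook/workflow event; the endpoint
  // responds with 204 and no body on success.
  await octokit.request("POST /repos/{owner}/{repo}/dispatches", {
    owner: "octocat",
    repo: "hello-world",
    event_type: "run-integration-tests",
    client_payload: { ref: "main", debug: true },
  });
  console.log("dispatch event sent");
})().catch(console.error);
```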
GitHub Apps must have the `actions:read` permission to use this endpoint. - */ - "repos/get-all-environments": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - /** The number of environments in this repository */ - total_count?: number; - environments?: components["schemas"]["environment"][]; - }; - }; - }; - }; - }; - /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ - "repos/get-environment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["environment"]; - }; - }; - }; - }; - /** - * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." - * - * **Note:** Although you can use this operation to specify that only branches that match specified name patterns can deploy to this environment, you must use the UI to set the name patterns. For more information, see "[Environments](/actions/reference/environments#deployment-branches)." - * - * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." - * - * You must authenticate using an access token with the repo scope to use this endpoint. - */ - "repos/create-or-update-environment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["environment"]; - }; - }; - /** Validation error when the environment name is invalid or when `protected_branches` and `custom_branch_policies` in `deployment_branch_policy` are set to the same value */ - 422: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - wait_timer?: components["schemas"]["wait-timer"]; - /** The people or teams that may review jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ - reviewers?: { - type?: components["schemas"]["deployment-reviewer-type"]; - /** The id of the user or team who can review the deployment */ - id?: number; - }[] | null; - deployment_branch_policy?: components["schemas"]["deployment_branch_policy"]; - } | null; - }; - }; - }; - /** You must authenticate using an access token with the repo scope to use this endpoint. 
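A speculative sketch of `repos/create-or-update-environment` with a wait timer, one required reviewer, and a branch policy; the reviewer id and repository names are placeholders, and the policy fields follow the `deployment_branch_policy` schema referenced above.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const { data } = await octokit.request(
    "PUT /repos/{owner}/{repo}/environments/{environment_name}",
    {
      owner: "octocat",
      repo: "hello-world",
      environment_name: "staging",
      wait_timer: 30,
      // Reviewer ids are numeric user/team ids; the value here is a placeholder.
      reviewers: [{ type: "User", id: 1 }],
      deployment_branch_policy: {
        protected_branches: false,
        custom_branch_policies: true,
      },
    }
  );
  console.log(`environment: ${data.name}`);
})().catch(console.error);
```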
*/ - "repos/delete-an-environment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - }; - }; - responses: { - /** Default response */ - 204: never; - }; - }; - "activity/list-repo-events": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - }; - }; - "repos/list-forks": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** The sort order. Can be either `newest`, `oldest`, or `stargazers`. */ - sort?: "newest" | "oldest" | "stargazers" | "watchers"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - 400: components["responses"]["bad_request"]; - }; - }; - /** - * Create a fork for the authenticated user. - * - * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com). - */ - "repos/create-fork": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 202: { - content: { - "application/json": components["schemas"]["full-repository"]; - }; - }; - 400: components["responses"]["bad_request"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Optional parameter to specify the organization name if forking into an organization. */ - organization?: string; - } | null; - }; - }; - }; - "git/create-blob": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["short-blob"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 409: components["responses"]["conflict"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The new blob's content. */ - content: string; - /** The encoding used for `content`. Currently, `"utf-8"` and `"base64"` are supported. */ - encoding?: string; - }; - }; - }; - }; - /** - * The `content` in the response will always be Base64 encoded. - * - * _Note_: This API supports blobs up to 100 megabytes in size. 
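For `git/create-blob` above, a minimal sketch that uploads Base64-encoded content and prints the resulting blob SHA; the content and repository are placeholders.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const { data } = await octokit.request("POST /repos/{owner}/{repo}/git/blobs", {
    owner: "octocat",
    repo: "hello-world",
    content: Buffer.from("raw file body").toString("base64"),
    encoding: "base64",
  });
  // The response is a "short blob": just the new object's SHA and URL.
  console.log(`blob sha: ${data.sha}`);
})().catch(console.error);
```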
- */ - "git/get-blob": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - file_sha: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["blob"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - "git/create-commit": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["git-commit"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The commit message */ - message: string; - /** The SHA of the tree object this commit points to */ - tree: string; - /** The SHAs of the commits that were the parents of this commit. If omitted or empty, the commit will be written as a root commit. For a single parent, an array of one SHA should be provided; for a merge commit, an array of more than one should be provided. 
*/ - parents?: string[]; - /** Information about the author of the commit. By default, the `author` will be the authenticated user and the current date. See the `author` and `committer` object below for details. */ - author?: { - /** The name of the author (or committer) of the commit */ - name: string; - /** The email of the author (or committer) of the commit */ - email: string; - /** Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - date?: string; - }; - /** Information about the person who is making the commit. By default, `committer` will use the information set in `author`. See the `author` and `committer` object below for details. */ - committer?: { - /** The name of the author (or committer) of the commit */ - name?: string; - /** The email of the author (or committer) of the commit */ - email?: string; - /** Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - date?: string; - }; - /** The [PGP signature](https://en.wikipedia.org/wiki/Pretty_Good_Privacy) of the commit. GitHub adds the signature to the `gpgsig` header of the created commit. For a commit signature to be verifiable by Git or GitHub, it must be an ASCII-armored detached PGP signature over the string commit as it would be written to the object database. To pass a `signature` parameter, you need to first manually create a valid PGP signature, which can be complicated. You may find it easier to [use the command line](https://git-scm.com/book/id/v2/Git-Tools-Signing-Your-Work) to create signed commits. */ - signature?: string; - }; - }; - }; - }; - /** - * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. 
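A sketch of `git/create-commit` as described above: point `tree` at an existing tree object and list the parent commit(s). The SHAs below are placeholders.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const { data } = await octokit.request("POST /repos/{owner}/{repo}/git/commits", {
    owner: "octocat",
    repo: "hello-world",
    message: "chore: generated commit",
    // `tree` is the SHA of an existing tree object; `parents` points at the
    // commit(s) this one follows. Both values below are placeholders.
    tree: "7ae23b6f1a6e3fd4d2c10f9e6b7c8d9e0f1a2b3c",
    parents: ["9fb037999f264ba9a7fc6274d15fa3ae2ab98312"],
  });
  console.log(`commit sha: ${data.sha}, verified: ${data.verification.verified}`);
})().catch(console.error);
```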
| - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - "git/get-commit": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** commit_sha parameter */ - commit_sha: components["parameters"]["commit_sha"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["git-commit"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. - * - * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. - * - * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - * - * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. - */ - "git/list-matching-refs": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["git-ref"][]; - }; - }; - }; - }; - /** - * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. - * - * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". 
- */ - "git/get-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["git-ref"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. */ - "git/create-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["git-ref"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the fully qualified reference (ie: `refs/heads/master`). If it doesn't start with 'refs' and have at least two slashes, it will be rejected. */ - ref: string; - /** The SHA1 value for this reference. */ - sha: string; - key?: string; - }; - }; - }; - }; - "git/delete-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - }; - responses: { - /** Response */ - 204: never; - 422: components["responses"]["validation_failed"]; - }; - }; - "git/update-ref": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** ref parameter */ - ref: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["git-ref"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The SHA1 value to set this reference to */ - sha: string; - /** Indicates whether to force the update or to make sure the update is a fast-forward update. Leaving this out or setting it to `false` will make sure you're not overwriting work. */ - force?: boolean; - }; - }; - }; - }; - /** - * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. 
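A short sketch covering `git/create-ref` and `git/update-ref` from the definitions above: create a fully qualified `refs/heads/...` reference, then move it. The SHA and branch name are placeholders.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const base = { owner: "octocat", repo: "hello-world" };
  const sha = "aa218f56b14c9653891f9e74264a383fa43fefbd"; // placeholder commit SHA

  // Create a branch: the ref must be fully qualified (refs/heads/...).
  await octokit.request("POST /repos/{owner}/{repo}/git/refs", {
    ...base,
    ref: "refs/heads/feature-x",
    sha,
  });

  // Later, fast-forward (or force-move) the branch to another commit.
  await octokit.request("PATCH /repos/{owner}/{repo}/git/refs/{ref}", {
    ...base,
    ref: "heads/feature-x", // note: no leading "refs/" in the path parameter
    sha,
    force: false,
  });
})().catch(console.error);
```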
| - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - "git/create-tag": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["git-tag"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The tag's name. This is typically a version (e.g., "v0.0.1"). */ - tag: string; - /** The tag message. */ - message: string; - /** The SHA of the git object this is tagging. */ - object: string; - /** The type of the object we're tagging. Normally this is a `commit` but it can also be a `tree` or a `blob`. */ - type: "commit" | "tree" | "blob"; - /** An object with information about the individual creating the tag. */ - tagger?: { - /** The name of the author of the tag */ - name: string; - /** The email of the author of the tag */ - email: string; - /** When this object was tagged. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - date?: string; - }; - }; - }; - }; - }; - /** - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. 
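As the `git/create-tag` description above notes, an annotated tag takes two calls: create the tag object, then create the `refs/tags/...` reference. A sketch with placeholder values:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const base = { owner: "octocat", repo: "hello-world" };

  // Step 1: create the annotated tag object itself.
  const tag = await octokit.request("POST /repos/{owner}/{repo}/git/tags", {
    ...base,
    tag: "v0.0.1",
    message: "Release v0.0.1",
    object: "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c", // placeholder commit SHA
    type: "commit",
  });

  // Step 2: create the refs/tags/... reference so the tag is visible in Git.
  await octokit.request("POST /repos/{owner}/{repo}/git/refs", {
    ...base,
    ref: "refs/tags/v0.0.1",
    sha: tag.data.sha,
  });
})().catch(console.error);
```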
| - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - "git/get-tag": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - tag_sha: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["git-tag"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. - * - * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." - */ - "git/create-tree": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["git-tree"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Objects (of `path`, `mode`, `type`, and `sha`) specifying a tree structure. */ - tree: { - /** The file referenced in the tree. */ - path?: string; - /** The file mode; one of `100644` for file (blob), `100755` for executable (blob), `040000` for subdirectory (tree), `160000` for submodule (commit), or `120000` for a blob that specifies the path of a symlink. */ - mode?: "100644" | "100755" | "040000" | "160000" | "120000"; - /** Either `blob`, `tree`, or `commit`. */ - type?: "blob" | "tree" | "commit"; - /** - * The SHA1 checksum ID of the object in the tree. Also called `tree.sha`. If the value is `null` then the file will be deleted. - * - * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. 
- */ - sha?: string | null; - /** - * The content you want this file to have. GitHub will write this blob out and use that SHA for this entry. Use either this, or `tree.sha`. - * - * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. - */ - content?: string; - }[]; - /** - * The SHA1 of an existing Git tree object which will be used as the base for the new tree. If provided, a new Git tree object will be created from entries in the Git tree object pointed to by `base_tree` and entries defined in the `tree` parameter. Entries defined in the `tree` parameter will overwrite items from `base_tree` with the same `path`. If you're creating new changes on a branch, then normally you'd set `base_tree` to the SHA1 of the Git tree object of the current latest commit on the branch you're working on. - * If not provided, GitHub will create a new Git tree object from only the entries defined in the `tree` parameter. If you create a new commit pointing to such a tree, then all files which were a part of the parent commit's tree and were not defined in the `tree` parameter will be listed as deleted by the new commit. - */ - base_tree?: string; - }; - }; - }; - }; - /** - * Returns a single tree using the SHA1 value for that tree. - * - * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. - */ - "git/get-tree": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - tree_sha: string; - }; - query: { - /** Setting this parameter to any value returns the objects or subtrees referenced by the tree specified in `:tree_sha`. For example, setting `recursive` to any of the following will enable returning objects or subtrees: `0`, `1`, `"true"`, and `"false"`. Omit this parameter to prevent recursively returning objects or subtrees. */ - recursive?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["git-tree"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "repos/list-webhooks": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["hook"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can - * share the same `config` as long as those webhooks do not have any `events` that overlap. 
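Tying together `git/create-tree`, `git/create-commit`, and `git/update-ref` from above, a sketch of the usual add-a-file flow; every SHA, path, and branch below is a placeholder.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const base = { owner: "octocat", repo: "hello-world" };
  const parentCommit = "7638417db6d59f3c431d3e1f261cc637155684cd"; // placeholder
  const parentTree = "9fb037999f264ba9a7fc6274d15fa3ae2ab98312";   // placeholder

  // 1. Create a tree on top of the parent commit's tree.
  const tree = await octokit.request("POST /repos/{owner}/{repo}/git/trees", {
    ...base,
    base_tree: parentTree,
    tree: [{ path: "notes.txt", mode: "100644", type: "blob", content: "hello" }],
  });

  // 2. Commit the new tree.
  const commit = await octokit.request("POST /repos/{owner}/{repo}/git/commits", {
    ...base,
    message: "add notes.txt",
    tree: tree.data.sha,
    parents: [parentCommit],
  });

  // 3. Point the branch at the new commit.
  await octokit.request("PATCH /repos/{owner}/{repo}/git/refs/{ref}", {
    ...base,
    ref: "heads/main",
    sha: commit.data.sha,
  });
})().catch(console.error);
```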
- */ - "repos/create-webhook": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["hook"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Use `web` to create a webhook. Default: `web`. This parameter only accepts the value `web`. */ - name?: string; - /** Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). */ - config?: { - url?: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - token?: string; - digest?: string; - }; - /** Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. */ - events?: string[]; - /** Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. */ - active?: boolean; - } | null; - }; - }; - }; - /** Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." */ - "repos/get-webhook": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["hook"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "repos/delete-webhook": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." */ - "repos/update-webhook": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["hook"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). 
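A sketch of `repos/create-webhook` with the `config` keys defined above; the payload URL, secret variable, and event list are assumptions for the example.

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

(async () => {
  const { data } = await octokit.request("POST /repos/{owner}/{repo}/hooks", {
    owner: "octocat",
    repo: "hello-world",
    name: "web",
    config: {
      url: "https://example.com/webhook",
      content_type: "json",
      secret: process.env.WEBHOOK_SECRET, // placeholder env var
      insecure_ssl: "0",
    },
    events: ["push", "pull_request"],
    active: true,
  });
  console.log(`hook id: ${data.id}`);
})().catch(console.error);
```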
*/ - config?: { - url: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - address?: string; - room?: string; - }; - /** Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. This replaces the entire array of events. */ - events?: string[]; - /** Determines a list of events to be added to the list of events that the Hook triggers for. */ - add_events?: string[]; - /** Determines a list of events to be removed from the list of events that the Hook triggers for. */ - remove_events?: string[]; - /** Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. */ - active?: boolean; - }; - }; - }; - }; - /** - * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." - * - * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. - */ - "repos/get-webhook-config-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["webhook-config"]; - }; - }; - }; - }; - /** - * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." - * - * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. - */ - "repos/update-webhook-config-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["webhook-config"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - url?: components["schemas"]["webhook-config-url"]; - content_type?: components["schemas"]["webhook-config-content-type"]; - secret?: components["schemas"]["webhook-config-secret"]; - insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; - }; - }; - }; - }; - /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ - "repos/ping-webhook": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** - * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. 
- * - * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` - */ - "repos/test-push-webhook": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - hook_id: components["parameters"]["hook-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** - * View the progress of an import. - * - * **Import status** - * - * This section includes details about the possible values of the `status` field of the Import Progress response. - * - * An import that does not have errors will progress through these steps: - * - * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. - * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). - * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. - * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". - * * `complete` - the import is complete, and the repository is ready on GitHub. - * - * If there are problems, you will see one of these in the `status` field: - * - * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact) or [GitHub Premium Support](https://premium.githubsupport.com) for more information. - * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. - * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. - * - * **The project_choices field** - * - * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. 
The exact key/value pairs of the project hashes will differ depending on the version control type. - * - * **Git LFS related fields** - * - * This section includes details about Git LFS related fields that may be present in the Import Progress response. - * - * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. - * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. - * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. - * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. - */ - "migrations/get-import-status": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["import"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Start a source import to a GitHub repository using GitHub Importer. */ - "migrations/start-import": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["import"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The URL of the originating repository. */ - vcs_url: string; - /** The originating VCS type. Can be one of `subversion`, `git`, `mercurial`, or `tfvc`. Please be aware that without this parameter, the import job will take additional time to detect the VCS type before beginning the import. This detection step will be reflected in the response. */ - vcs?: "subversion" | "git" | "mercurial" | "tfvc"; - /** If authentication is required, the username to provide to `vcs_url`. */ - vcs_username?: string; - /** If authentication is required, the password to provide to `vcs_url`. */ - vcs_password?: string; - /** For a tfvc import, the name of the project that is being imported. */ - tfvc_project?: string; - }; - }; - }; - }; - /** Stop an import for a repository. */ - "migrations/cancel-import": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API - * request. If no parameters are provided, the import will be restarted. - */ - "migrations/update-import": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["import"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The username to provide to the originating repository. */ - vcs_username?: string; - /** The password to provide to the originating repository. 
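// Illustrative sketch, not part of the patch: the "migrations/start-import" and
// "migrations/get-import-status" operations described above, with a simple poll over the
// documented `status` values. @octokit/rest v18 client; the Subversion URL is a placeholder.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function importRepository(owner: string, repo: string): Promise<void> {
  // PUT /repos/{owner}/{repo}/import -> 201; omitting `vcs` adds a "detecting" phase.
  await octokit.rest.migrations.startImport({
    owner,
    repo,
    vcs: "subversion",
    vcs_url: "https://svn.example.com/project/trunk",
  });

  // Poll until the import reaches a terminal state.
  for (;;) {
    const { data } = await octokit.rest.migrations.getImportStatus({ owner, repo });
    if (data.status === "complete") return;
    if (data.status === "error" || data.status === "auth_failed") {
      throw new Error(`import failed in step ${data.failed_step ?? "unknown"}`);
    }
    await new Promise((resolve) => setTimeout(resolve, 10_000));
  }
}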
*/ - vcs_password?: string; - vcs?: string; - tfvc_project?: string; - } | null; - }; - }; - }; - /** - * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. - * - * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. - */ - "migrations/get-commit-authors": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** A user ID. Only return users with an ID greater than this ID. */ - since?: components["parameters"]["since-user"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["porter-author"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. */ - "migrations/map-commit-author": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - author_id: number; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["porter-author"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The new Git author email. */ - email?: string; - /** The new Git author name. */ - name?: string; - }; - }; - }; - }; - /** List files larger than 100MB found during the import */ - "migrations/get-large-files": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["porter-large-file"][]; - }; - }; - }; - }; - /** You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://help.github.com/articles/versioning-large-files/). */ - "migrations/set-lfs-preference": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["import"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Can be one of `opt_in` (large files will be stored using Git LFS) or `opt_out` (large files will be removed during the import). */ - use_lfs: "opt_in" | "opt_out"; - }; - }; - }; - }; - /** - * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. 
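// Illustrative sketch, not part of the patch: fixing up author identities with
// "migrations/get-commit-authors" / "migrations/map-commit-author" and opting the import
// into Git LFS via "migrations/set-lfs-preference". The corrected name and email are placeholders.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function fixImportAuthors(owner: string, repo: string): Promise<void> {
  const { data: authors } = await octokit.rest.migrations.getCommitAuthors({ owner, repo });

  if (authors.length > 0) {
    // Replace the placeholder identity the importer generated (e.g. a bare SVN username).
    await octokit.rest.migrations.mapCommitAuthor({
      owner,
      repo,
      author_id: authors[0].id,
      name: "Mona Lisa Octocat",
      email: "mona@example.com",
    });
  }

  // Store files larger than 100MB with Git LFS instead of removing them during the import.
  await octokit.rest.migrations.setLfsPreference({ owner, repo, use_lfs: "opt_in" });
}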
- * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/get-repo-installation": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["installation"]; - }; - }; - 301: components["responses"]["moved_permanently"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. */ - "interactions/get-restrictions-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": Partial & Partial<{ - [key: string]: any; - }>; - }; - }; - }; - }; - /** Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ - "interactions/set-restrictions-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["interaction-limit-response"]; - }; - }; - /** Response */ - 409: unknown; - }; - requestBody: { - content: { - "application/json": components["schemas"]["interaction-limit"]; - }; - }; - }; - /** Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ - "interactions/remove-restrictions-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** Response */ - 409: unknown; - }; - }; - /** When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. */ - "repos/list-invitations": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
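// Illustrative sketch, not part of the patch: temporarily limiting interactions on a
// repository with "interactions/set-restrictions-for-repo" and lifting the limit again.
// Assumes owner/admin access; a 409 means the limit is managed at the user or org level instead.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function coolDownRepo(owner: string, repo: string): Promise<void> {
  // PUT /repos/{owner}/{repo}/interaction-limits
  await octokit.rest.interactions.setRestrictionsForRepo({
    owner,
    repo,
    limit: "collaborators_only",
  });

  // ...later: DELETE /repos/{owner}/{repo}/interaction-limits -> 204
  await octokit.rest.interactions.removeRestrictionsForRepo({ owner, repo });
}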
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["repository-invitation"][]; - }; - }; - }; - }; - "repos/delete-invitation": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** invitation_id parameter */ - invitation_id: components["parameters"]["invitation_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "repos/update-invitation": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** invitation_id parameter */ - invitation_id: components["parameters"]["invitation_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["repository-invitation"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The permissions that the associated user will have on the repository. Valid values are `read`, `write`, `maintain`, `triage`, and `admin`. */ - permissions?: "read" | "write" | "maintain" | "triage" | "admin"; - }; - }; - }; - }; - /** - * List issues in a repository. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - "issues/list-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** If an `integer` is passed, it should refer to a milestone by its `number` field. If the string `*` is passed, issues with any milestone are accepted. If the string `none` is passed, issues without milestones are returned. */ - milestone?: string; - /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ - state?: "open" | "closed" | "all"; - /** Can be the name of a user. Pass in `none` for issues with no assigned user, and `*` for issues assigned to any user. */ - assignee?: string; - /** The user that created the issue. */ - creator?: string; - /** A user that's mentioned in the issue. */ - mentioned?: string; - /** A list of comma separated label names. Example: `bug,ui,@high` */ - labels?: components["parameters"]["labels"]; - /** What to sort results by. Can be either `created`, `updated`, `comments`. */ - sort?: "created" | "updated" | "comments"; - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue-simple"][]; - }; - }; - 301: components["responses"]["moved_permanently"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://help.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. - */ - "issues/create": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["issue"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed"]; - 503: components["responses"]["service_unavailable"]; - }; - requestBody: { - content: { - "application/json": { - /** The title of the issue. */ - title: string | number; - /** The contents of the issue. */ - body?: string; - /** Login for the user that this issue should be assigned to. _NOTE: Only users with push access can set the assignee for new issues. The assignee is silently dropped otherwise. **This field is deprecated.**_ */ - assignee?: string | null; - milestone?: (string | number) | null; - /** Labels to associate with this issue. _NOTE: Only users with push access can set labels for new issues. Labels are silently dropped otherwise._ */ - labels?: (string | { - id?: number; - name?: string; - description?: string | null; - color?: string | null; - })[]; - /** Logins for Users to assign to this issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._ */ - assignees?: string[]; - }; - }; - }; - }; - /** By default, Issue Comments are ordered by ascending ID. */ - "issues/list-comments-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** One of `created` (when the repository was starred) or `updated` (when it was last pushed to). */ - sort?: components["parameters"]["sort"]; - /** Either `asc` or `desc`. Ignored without the `sort` parameter. */ - direction?: "asc" | "desc"; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
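// Illustrative sketch, not part of the patch: "issues/list-for-repo" and "issues/create".
// As noted above, the list endpoint also returns pull requests, so real issues are picked
// out via the `pull_request` key. Label names and the assignee login are placeholders.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function triageIssues(owner: string, repo: string): Promise<void> {
  const { data } = await octokit.rest.issues.listForRepo({
    owner,
    repo,
    state: "open",
    labels: "bug",
    per_page: 100,
  });
  const issuesOnly = data.filter((item) => !item.pull_request);
  console.log(`open bug issues (excluding PRs): ${issuesOnly.length}`);

  // POST /repos/{owner}/{repo}/issues -> 201 (410 Gone if issues are disabled).
  await octokit.rest.issues.create({
    owner,
    repo,
    title: "Flaky test in CI",
    body: "Seen intermittently on the release workflow.",
    labels: ["bug"],
    assignees: ["octocat"],
  });
}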
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue-comment"][]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "issues/get-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["issue-comment"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "issues/delete-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "issues/update-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["issue-comment"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The contents of the comment. */ - body: string; - }; - }; - }; - }; - /** List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). */ - "reactions/list-for-issue-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue comment. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this issue comment. 
*/ - "reactions/create-for-issue-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Reaction exists */ - 200: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - /** Reaction created */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue comment. */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. - * - * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). - */ - "reactions/delete-for-issue-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - reaction_id: components["parameters"]["reaction-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "issues/list-events-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue-event"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - }; - "issues/get-event": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - event_id: number; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["issue-event"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - }; - }; - /** - * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was - * [transferred](https://help.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If - * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API - * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read - * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe - * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. 
Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - "issues/get": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["issue"]; - }; - }; - 301: components["responses"]["moved_permanently"]; - 304: components["responses"]["not_modified"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - }; - }; - /** Issue owners and users with push access can edit an issue. */ - "issues/update": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["issue"]; - }; - }; - 301: components["responses"]["moved_permanently"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed"]; - 503: components["responses"]["service_unavailable"]; - }; - requestBody: { - content: { - "application/json": { - /** The title of the issue. */ - title?: (string | number) | null; - /** The contents of the issue. */ - body?: string | null; - /** Login for the user that this issue should be assigned to. **This field is deprecated.** */ - assignee?: string | null; - /** State of the issue. Either `open` or `closed`. */ - state?: "open" | "closed"; - milestone?: (string | number) | null; - /** Labels to associate with this issue. Pass one or more Labels to _replace_ the set of Labels on this Issue. Send an empty array (`[]`) to clear all Labels from the Issue. _NOTE: Only users with push access can set labels for issues. Labels are silently dropped otherwise._ */ - labels?: (string | { - id?: number; - name?: string; - description?: string | null; - color?: string | null; - })[]; - /** Logins for Users to assign to this issue. Pass one or more user logins to _replace_ the set of assignees on this Issue. Send an empty array (`[]`) to clear all assignees from the Issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._ */ - assignees?: string[]; - }; - }; - }; - }; - /** Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. */ - "issues/add-assignees": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["issue-simple"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Usernames of people to assign this issue to. _NOTE: Only users with push access can add assignees to an issue. Assignees are silently ignored otherwise._ */ - assignees?: string[]; - }; - }; - }; - }; - /** Removes one or more assignees from an issue. 
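// Illustrative sketch, not part of the patch: "issues/get", "issues/add-assignees" and
// "issues/update" (closing the issue). Only users with push access can change assignees;
// otherwise they are silently dropped, as the declarations above note. The login is a placeholder.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function closeIssue(owner: string, repo: string, issueNumber: number): Promise<void> {
  const { data: issue } = await octokit.rest.issues.get({ owner, repo, issue_number: issueNumber });
  console.log(`closing #${issue.number}: ${issue.title}`);

  await octokit.rest.issues.addAssignees({
    owner,
    repo,
    issue_number: issueNumber,
    assignees: ["octocat"],
  });

  await octokit.rest.issues.update({
    owner,
    repo,
    issue_number: issueNumber,
    state: "closed",
  });
}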
*/ - "issues/remove-assignees": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["issue-simple"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Usernames of assignees to remove from an issue. _NOTE: Only users with push access can remove assignees from an issue. Assignees are silently ignored otherwise._ */ - assignees?: string[]; - }; - }; - }; - }; - /** Issue Comments are ordered by ascending ID. */ - "issues/list-comments": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - query: { - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue-comment"][]; - }; - }; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - }; - }; - /** This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. */ - "issues/create-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["issue-comment"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The contents of the comment. */ - body: string; - }; - }; - }; - }; - "issues/list-events": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue-event-for-issue"][]; - }; - }; - 410: components["responses"]["gone"]; - }; - }; - "issues/list-labels-on-issue": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["label"][]; - }; - }; - 410: components["responses"]["gone"]; - }; - }; - /** Removes any previous labels and sets the new labels for an issue. */ - "issues/set-labels": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["label"][]; - }; - }; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": Partial<{ - /** The names of the labels to add to the issue. You can pass an empty array to remove all labels. **Note:** Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. */ - labels?: string[]; - }> & Partial<{ - labels?: { - name: string; - }[]; - }>; - }; - }; - }; - "issues/add-labels": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["label"][]; - }; - }; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The names of the labels to add to the issue. You can pass an empty array to remove all labels. **Note:** Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. */ - labels?: string[]; - } | { - labels?: { - name: string; - }[]; - }; - }; - }; - }; - "issues/remove-all-labels": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 204: never; - 410: components["responses"]["gone"]; - }; - }; - /** Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. 
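// Illustrative sketch, not part of the patch: the label operations typed above.
// "issues/set-labels" replaces the whole set, "issues/add-labels" appends, and
// "issues/remove-label" drops a single one. Label names are placeholders.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function relabelIssue(owner: string, repo: string, issueNumber: number): Promise<void> {
  // Replace whatever is there with exactly these two labels.
  await octokit.rest.issues.setLabels({
    owner,
    repo,
    issue_number: issueNumber,
    labels: ["bug", "needs-triage"],
  });

  // Append one more without touching the existing set.
  await octokit.rest.issues.addLabels({ owner, repo, issue_number: issueNumber, labels: ["regression"] });

  // Remove a single label; 404 if it is not on the issue.
  await octokit.rest.issues.removeLabel({ owner, repo, issue_number: issueNumber, name: "needs-triage" });
}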
*/ - "issues/remove-label": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - name: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["label"][]; - }; - }; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - }; - }; - /** - * Users with push access can lock an issue or pull request's conversation. - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - "issues/lock": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** - * The reason for locking the issue or pull request conversation. Lock will fail if you don't use one of these reasons: - * \* `off-topic` - * \* `too heated` - * \* `resolved` - * \* `spam` - */ - lock_reason?: "off-topic" | "too heated" | "resolved" | "spam"; - } | null; - }; - }; - }; - /** Users with push access can unlock an issue's conversation. */ - "issues/unlock": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** List the reactions to an [issue](https://docs.github.com/rest/reference/issues). */ - "reactions/list-for-issue": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with a `Status: 200 OK` means that you already added the reaction type to this issue. 
*/ - "reactions/create-for-issue": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue. */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. - * - * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). - */ - "reactions/delete-for-issue": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - reaction_id: components["parameters"]["reaction-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "issues/list-events-for-timeline": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** issue_number parameter */ - issue_number: components["parameters"]["issue_number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue-event-for-issue"][]; - }; - }; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - "repos/list-deploy-keys": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["deploy-key"][]; - }; - }; - }; - }; - /** You can create a read-only deploy key. */ - "repos/create-deploy-key": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["deploy-key"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** A name for the key. */ - title?: string; - /** The contents of the key. */ - key: string; - /** - * If `true`, the key will only be able to read repository contents. Otherwise, the key will be able to read and write. 
- * - * Deploy keys with write access can perform the same actions as an organization member with admin access, or a collaborator on a personal repository. For more information, see "[Repository permission levels for an organization](https://help.github.com/articles/repository-permission-levels-for-an-organization/)" and "[Permission levels for a user account repository](https://help.github.com/articles/permission-levels-for-a-user-account-repository/)." - */ - read_only?: boolean; - }; - }; - }; - }; - "repos/get-deploy-key": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** key_id parameter */ - key_id: components["parameters"]["key_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["deploy-key"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. */ - "repos/delete-deploy-key": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** key_id parameter */ - key_id: components["parameters"]["key_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "issues/list-labels-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["label"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "issues/create-label": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["label"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." */ - name: string; - /** The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ - color?: string; - /** A short description of the label. 
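// Illustrative sketch, not part of the patch: "repos/create-deploy-key" (read-only, since
// deploy keys are immutable and must be recreated to change access) and "issues/create-label".
// The key material, label name, color and description are placeholders.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function bootstrapRepo(owner: string, repo: string, publicKey: string): Promise<void> {
  await octokit.rest.repos.createDeployKey({
    owner,
    repo,
    title: "ci-read-only",
    key: publicKey, // e.g. the contents of an ed25519 .pub file
    read_only: true,
  });

  await octokit.rest.issues.createLabel({
    owner,
    repo,
    name: "release",
    color: "0e8a16", // hex code without the leading `#`
    description: "Tracked for the next release",
  });
}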
*/ - description?: string; - }; - }; - }; - }; - "issues/get-label": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - name: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["label"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "issues/delete-label": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - name: string; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "issues/update-label": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - name: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["label"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The new name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." */ - new_name?: string; - /** The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ - color?: string; - /** A short description of the label. */ - description?: string; - }; - }; - }; - }; - /** Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. */ - "repos/list-languages": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["language"]; - }; - }; - }; - }; - /** - * This method returns the contents of the repository's license file, if one is detected. - * - * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. - */ - "licenses/get-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["license-content"]; - }; - }; - }; - }; - "repos/merge": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Successful Response (The resulting merge commit) */ - 201: { - content: { - "application/json": components["schemas"]["commit"]; - }; - }; - /** Response when already merged */ - 204: never; - 403: components["responses"]["forbidden"]; - /** Not Found when the base or head does not exist */ - 404: unknown; - /** Conflict when there is a merge conflict */ - 409: unknown; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the base branch that the head will be merged into. */ - base: string; - /** The head to merge. This can be a branch name or a commit SHA1. */ - head: string; - /** Commit message to use for the merge commit. 
If omitted, a default message will be used. */ - commit_message?: string; - }; - }; - }; - }; - "issues/list-milestones": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** The state of the milestone. Either `open`, `closed`, or `all`. */ - state?: "open" | "closed" | "all"; - /** What to sort results by. Either `due_on` or `completeness`. */ - sort?: "due_on" | "completeness"; - /** The direction of the sort. Either `asc` or `desc`. */ - direction?: "asc" | "desc"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["milestone"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "issues/create-milestone": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["milestone"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The title of the milestone. */ - title: string; - /** The state of the milestone. Either `open` or `closed`. */ - state?: "open" | "closed"; - /** A description of the milestone. */ - description?: string; - /** The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - due_on?: string; - }; - }; - }; - }; - "issues/get-milestone": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** milestone_number parameter */ - milestone_number: components["parameters"]["milestone_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["milestone"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - "issues/delete-milestone": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** milestone_number parameter */ - milestone_number: components["parameters"]["milestone_number"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - "issues/update-milestone": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** milestone_number parameter */ - milestone_number: components["parameters"]["milestone_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["milestone"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The title of the milestone. */ - title?: string; - /** The state of the milestone. Either `open` or `closed`. */ - state?: "open" | "closed"; - /** A description of the milestone. */ - description?: string; - /** The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. 
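// Illustrative sketch, not part of the patch: "repos/merge" (201 with the merge commit,
// 204 if already merged, 409 on conflict) followed by "issues/create-milestone".
// Branch names, the milestone title and the due date are placeholders.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function mergeAndPlan(owner: string, repo: string): Promise<void> {
  const { data: mergeCommit } = await octokit.rest.repos.merge({
    owner,
    repo,
    base: "main",
    head: "release/1.2",
    commit_message: "Merge release/1.2 into main",
  });
  if (mergeCommit) {
    console.log(`merge commit ${mergeCommit.sha}`);
  }

  await octokit.rest.issues.createMilestone({
    owner,
    repo,
    title: "v1.3",
    description: "Next minor release",
    due_on: "2021-08-01T00:00:00Z",
  });
}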
*/ - due_on?: string; - }; - }; - }; - }; - "issues/list-labels-for-milestone": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** milestone_number parameter */ - milestone_number: components["parameters"]["milestone_number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["label"][]; - }; - }; - }; - }; - /** List all notifications for the current user. */ - "activity/list-repo-notifications-for-authenticated-user": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** If `true`, show notifications marked as read. */ - all?: components["parameters"]["all"]; - /** If `true`, only shows notifications in which the user is directly participating or mentioned. */ - participating?: components["parameters"]["participating"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - before?: components["parameters"]["before"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["thread"][]; - }; - }; - }; - }; - /** Marks all notifications in a repository as "read" removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ - "activity/mark-repo-notifications-as-read": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 202: { - content: { - "application/json": { - message?: string; - url?: string; - }; - }; - }; - /** Reset Content */ - 205: unknown; - }; - requestBody: { - content: { - "application/json": { - /** Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp. 
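// Illustrative sketch, not part of the patch: "activity/list-repo-notifications-for-authenticated-user"
// followed by "activity/mark-repo-notifications-as-read". A 202 response means GitHub is
// finishing the bulk update asynchronously. Client setup is a placeholder.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

export async function clearRepoNotifications(owner: string, repo: string): Promise<void> {
  const { data: threads } = await octokit.rest.activity.listRepoNotificationsForAuthenticatedUser({
    owner,
    repo,
    participating: true,
  });
  console.log(`unread threads: ${threads.length}`);

  await octokit.rest.activity.markRepoNotificationsAsRead({
    owner,
    repo,
    last_read_at: new Date().toISOString(),
  });
}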
*/ - last_read_at?: string; - }; - }; - }; - }; - "repos/get-pages": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["page"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). */ - "repos/update-information-about-pages-site": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - 400: components["responses"]["bad_request"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": (Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }>) & { - /** Specify a custom domain for the repository. Sending a `null` value will remove the custom domain. For more about custom domains, see "[Using a custom domain with GitHub Pages](https://help.github.com/articles/using-a-custom-domain-with-github-pages/)." */ - cname?: string | null; - /** Specify whether HTTPS should be enforced for the repository. */ - https_enforced?: boolean; - /** Configures access controls for the GitHub Pages site. If public is set to `true`, the site is accessible to anyone on the internet. If set to `false`, the site will only be accessible to users who have at least `read` access to the repository that published the site. This includes anyone in your Enterprise if the repository is set to `internal` visibility. This feature is only available to repositories in an organization on an Enterprise plan. */ - public?: boolean; - source?: Partial<"gh-pages" | "master" | "master /docs"> & Partial<{ - /** The repository branch used to publish your site's source files. */ - branch: string; - /** The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. */ - path: "/" | "/docs"; - }>; - }; - }; - }; - }; - /** Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." */ - "repos/create-pages-site": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["page"]; - }; - }; - 409: components["responses"]["conflict"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The source branch and directory used to publish your Pages site. */ - source: { - /** The repository branch used to publish your site's source files. */ - branch: string; - /** The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. 
Default: `/` */ - path?: "/" | "/docs"; - }; - } | null; - }; - }; - }; - "repos/delete-pages-site": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "repos/list-pages-builds": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["page-build"][]; - }; - }; - }; - }; - /** - * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. - * - * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. - */ - "repos/request-pages-build": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["page-build-status"]; - }; - }; - }; - }; - "repos/get-latest-pages-build": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["page-build"]; - }; - }; - }; - }; - "repos/get-pages-build": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - build_id: number; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["page-build"]; - }; - }; - }; - }; - /** - * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. - * - * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. - * - * Users must have admin or owner permissions. GitHub Apps must have the `pages:write` and `administration:write` permission to use this endpoint. - */ - "repos/get-pages-health-check": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pages-health-check"]; - }; - }; - /** Empty response */ - 202: unknown; - /** Custom domains are not available for GitHub Pages */ - 400: unknown; - 404: components["responses"]["not_found"]; - /** There isn't a CNAME for this page */ - 422: unknown; - }; - }; - /** Lists the projects in a repository. 
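The health-check endpoint above returns 202 while the background DNS check runs, so a caller typically polls. A rough sketch, assuming an authenticated `@octokit/core` client and an arbitrary retry budget:

import { Octokit } from "@octokit/core";

// Sketch: poll repos/get-pages-health-check until the asynchronous DNS check completes.
async function getPagesHealth(octokit: Octokit, owner: string, repo: string) {
  for (let attempt = 0; attempt < 5; attempt++) {
    const res = await octokit.request("GET /repos/{owner}/{repo}/pages/health", { owner, repo });
    if (res.status === 200) {
      return res.data; // pages-health-check payload
    }
    await new Promise((resolve) => setTimeout(resolve, 2000)); // 202: background task still running
  }
  throw new Error("Pages health check did not complete within the retry budget");
}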
Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - "projects/list-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`. */ - state?: "open" | "closed" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["project"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed_simple"]; - }; - }; - /** Creates a repository project board. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ - "projects/create-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["project"]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 410: components["responses"]["gone"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the project. */ - name: string; - /** The description of the project. */ - body?: string; - }; - }; - }; - }; - /** Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ - "pulls/list": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Either `open`, `closed`, or `all` to filter by state. */ - state?: "open" | "closed" | "all"; - /** Filter pulls by head user or head organization and branch name in the format of `user:ref-name` or `organization:ref-name`. For example: `github:new-script-format` or `octocat:test-branch`. */ - head?: string; - /** Filter pulls by base branch name. Example: `gh-pages`. */ - base?: string; - /** What to sort results by. Can be either `created`, `updated`, `popularity` (comment count) or `long-running` (age, filtering by pulls updated in the last month). */ - sort?: "created" | "updated" | "popularity" | "long-running"; - /** The direction of the sort. Can be either `asc` or `desc`. Default: `desc` when sort is `created` or sort is not specified, otherwise `asc`. */ - direction?: "asc" | "desc"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
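For the `projects/create-for-repo` operation above, a minimal sketch assuming an authenticated `@octokit/core` client; the project name and description are placeholders, and the `inertia` preview media type is included only in case the target API version still expects it.

import { Octokit } from "@octokit/core";

// Sketch: create a repository project board and handle the 410 returned when
// projects are disabled for the repository.
async function createRepoProject(octokit: Octokit, owner: string, repo: string) {
  try {
    const res = await octokit.request("POST /repos/{owner}/{repo}/projects", {
      owner,
      repo,
      name: "Release tracking",           // placeholder project name
      body: "Tasks for the next release", // optional description
      mediaType: { previews: ["inertia"] },
    });
    return res.data;
  } catch (error: any) {
    if (error.status === 410) {
      console.log("Projects are disabled for this repository");
      return undefined;
    }
    throw error;
  }
}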
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["pull-request-simple"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. - * - * You can create a new pull request. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - "pulls/create": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["pull-request"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The title of the new pull request. */ - title?: string; - /** The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace `head` with a user like this: `username:branch`. */ - head: string; - /** The name of the branch you want the changes pulled into. This should be an existing branch on the current repository. You cannot submit a pull request to one repository that requests a merge to a base of another repository. */ - base: string; - /** The contents of the pull request. */ - body?: string; - /** Indicates whether [maintainers can modify](https://help.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. */ - maintainer_can_modify?: boolean; - /** Indicates whether the pull request is a draft. See "[Draft Pull Requests](https://help.github.com/en/articles/about-pull-requests#draft-pull-requests)" in the GitHub Help documentation to learn more. */ - draft?: boolean; - issue?: number; - }; - }; - }; - }; - /** Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. */ - "pulls/list-review-comments-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - sort?: "created" | "updated" | "created_at"; - /** Can be either `asc` or `desc`. Ignored without `sort` parameter. */ - direction?: "asc" | "desc"; - /** Only show notifications updated after the given time. 
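A sketch of the `pulls/create` request body documented above, assuming an authenticated `@octokit/core` client; the branch names and text are placeholders.

import { Octokit } from "@octokit/core";

// Sketch: open a draft pull request from a feature branch into main.
// For a cross-repository PR the head would be namespaced as "fork-owner:branch".
async function openPullRequest(octokit: Octokit, owner: string, repo: string): Promise<void> {
  const res = await octokit.request("POST /repos/{owner}/{repo}/pulls", {
    owner,
    repo,
    title: "Update dependencies",
    head: "feature/update-deps",
    base: "main",
    body: "Automated dependency bump.",
    draft: true,
    maintainer_can_modify: true,
  });
  console.log(`Created pull request #${res.data.number}`);
}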
This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["pull-request-review-comment"][]; - }; - }; - }; - }; - /** Provides details for a review comment. */ - "pulls/get-review-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review-comment"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Deletes a review comment. */ - "pulls/delete-review-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - }; - }; - /** Enables you to edit a review comment. */ - "pulls/update-review-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review-comment"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The text of the reply to the review comment. */ - body: string; - }; - }; - }; - }; - /** List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). */ - "reactions/list-for-pull-request-review-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a pull request review comment. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with a `Status: 200 OK` means that you already added the reaction type to this pull request review comment. 
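A small sketch of `pulls/update-review-comment` as documented above, assuming an authenticated `@octokit/core` client and an existing review-comment id.

import { Octokit } from "@octokit/core";

// Sketch: replace the body of an existing pull request review comment.
async function editReviewComment(octokit: Octokit, owner: string, repo: string, commentId: number) {
  const { data } = await octokit.request("PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", {
    owner,
    repo,
    comment_id: commentId,
    body: "Updated: this was addressed in the follow-up commit.",
  });
  return data; // updated pull-request-review-comment
}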
*/ - "reactions/create-for-pull-request-review-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Reaction exists */ - 200: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - /** Reaction created */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the pull request review comment. */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` - * - * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). - */ - "reactions/delete-for-pull-request-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - reaction_id: components["parameters"]["reaction-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Lists details of a pull request by providing its number. - * - * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". - * - * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. - * - * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. 
After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: - * - * * If merged as a [merge commit](https://help.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. - * * If merged via a [squash](https://help.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. - * * If [rebased](https://help.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. - * - * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. - */ - "pulls/get": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. */ - 200: { - content: { - "application/json": components["schemas"]["pull-request"]; - }; - }; - 304: components["responses"]["not_modified"]; - 404: components["responses"]["not_found"]; - 500: components["responses"]["internal_error"]; - }; - }; - /** - * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. - */ - "pulls/update": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The title of the pull request. */ - title?: string; - /** The contents of the pull request. */ - body?: string; - /** State of this Pull Request. Either `open` or `closed`. */ - state?: "open" | "closed"; - /** The name of the branch you want your changes pulled into. This should be an existing branch on the current repository. You cannot update the base branch on a pull request to point to another repository. */ - base?: string; - /** Indicates whether [maintainers can modify](https://help.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. */ - maintainer_can_modify?: boolean; - }; - }; - }; - }; - /** Lists all review comments for a pull request. By default, review comments are in ascending order by ID. 
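The `mergeable` semantics described above (null while a background job computes the test merge) are usually handled by polling `pulls/get`. A sketch with an arbitrary retry budget, assuming an authenticated `@octokit/core` client:

import { Octokit } from "@octokit/core";

// Sketch: poll a pull request until GitHub has computed its mergeability.
async function waitForMergeable(octokit: Octokit, owner: string, repo: string, pullNumber: number) {
  for (let attempt = 0; attempt < 10; attempt++) {
    const { data } = await octokit.request("GET /repos/{owner}/{repo}/pulls/{pull_number}", {
      owner,
      repo,
      pull_number: pullNumber,
    });
    if (data.mergeable !== null) {
      return data.mergeable; // true or false; merge_commit_sha now points at the test merge
    }
    await new Promise((resolve) => setTimeout(resolve, 1500)); // background job still running
  }
  return null; // still unknown after polling
}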
*/ - "pulls/list-review-comments": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - query: { - /** One of `created` (when the repository was starred) or `updated` (when it was last pushed to). */ - sort?: components["parameters"]["sort"]; - /** Can be either `asc` or `desc`. Ignored without `sort` parameter. */ - direction?: "asc" | "desc"; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["pull-request-review-comment"][]; - }; - }; - }; - }; - /** - * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. - * - * You can still create a review comment using the `position` parameter. When you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. For more information, see the [`comfort-fade` preview notice](https://docs.github.com/rest/reference/pulls#create-a-review-comment-for-a-pull-request-preview-notices). - * - * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - "pulls/create-review-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["pull-request-review-comment"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The text of the review comment. */ - body: string; - /** The SHA of the commit needing a comment. Not using the latest commit SHA may render your comment outdated if a subsequent commit modifies the line you specify as the `position`. */ - commit_id?: string; - /** The relative path to the file that necessitates a comment. 
*/ - path?: string; - /** **Required without `comfort-fade` preview**. The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note above. */ - position?: number; - /** **Required with `comfort-fade` preview**. In a split diff view, the side of the diff that the pull request's changes appear on. Can be `LEFT` or `RIGHT`. Use `LEFT` for deletions that appear in red. Use `RIGHT` for additions that appear in green or unchanged lines that appear in white and are shown for context. For a multi-line comment, side represents whether the last line of the comment range is a deletion or addition. For more information, see "[Diff view options](https://help.github.com/en/articles/about-comparing-branches-in-pull-requests#diff-view-options)" in the GitHub Help documentation. */ - side?: "LEFT" | "RIGHT"; - /** **Required with `comfort-fade` preview**. The line of the blob in the pull request diff that the comment applies to. For a multi-line comment, the last line of the range that your comment applies to. */ - line?: number; - /** **Required when using multi-line comments**. To create multi-line comments, you must use the `comfort-fade` preview header. The `start_line` is the first line in the pull request diff that your multi-line comment applies to. To learn more about multi-line comments, see "[Commenting on a pull request](https://help.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. */ - start_line?: number; - /** **Required when using multi-line comments**. To create multi-line comments, you must use the `comfort-fade` preview header. The `start_side` is the starting side of the diff that the comment applies to. Can be `LEFT` or `RIGHT`. To learn more about multi-line comments, see "[Commenting on a pull request](https://help.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. See `side` in this table for additional context. */ - start_side?: "LEFT" | "RIGHT" | "side"; - in_reply_to?: number; - }; - }; - }; - }; - /** - * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. 
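A sketch of the recommended `line`/`side`/`start_line` form of `pulls/create-review-comment` described above, assuming an authenticated `@octokit/core` client; the file path and line numbers are invented.

import { Octokit } from "@octokit/core";

// Sketch: comment on lines 40-42 of a file in the pull request diff.
async function commentOnDiffRange(
  octokit: Octokit,
  owner: string,
  repo: string,
  pullNumber: number,
  commitSha: string
): Promise<void> {
  await octokit.request("POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {
    owner,
    repo,
    pull_number: pullNumber,
    commit_id: commitSha,  // the latest head SHA keeps the comment from going outdated
    path: "src/index.ts",  // hypothetical file in the diff
    body: "Consider extracting this block into a helper.",
    side: "RIGHT",
    start_line: 40,
    start_side: "RIGHT",
    line: 42,              // last line of the commented range
  });
}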
- */ - "pulls/create-reply-for-review-comment": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - /** comment_id parameter */ - comment_id: components["parameters"]["comment_id"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["pull-request-review-comment"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - requestBody: { - content: { - "application/json": { - /** The text of the review comment. */ - body: string; - }; - }; - }; - }; - /** Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. */ - "pulls/list-commits": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["commit"][]; - }; - }; - }; - }; - /** **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. */ - "pulls/list-files": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["diff-entry"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - 500: components["responses"]["internal_error"]; - }; - }; - "pulls/check-if-merged": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Response if pull request has been merged */ - 204: never; - /** Not Found if pull request has not been merged */ - 404: unknown; - }; - }; - /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. 
*/ - "pulls/merge": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** if merge was successful */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-merge-result"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - /** Method Not Allowed if merge cannot be performed */ - 405: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - }; - }; - }; - /** Conflict if sha was provided and pull request head did not match */ - 409: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - }; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Title for the automatic commit message. */ - commit_title?: string; - /** Extra detail to append to automatic commit message. */ - commit_message?: string; - /** SHA that pull request head must match to allow merge. */ - sha?: string; - /** Merge method to use. Possible values are `merge`, `squash` or `rebase`. Default is `merge`. */ - merge_method?: "merge" | "squash" | "rebase"; - } | null; - }; - }; - }; - "pulls/list-requested-reviewers": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["pull-request-review-request"]; - }; - }; - }; - }; - /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-abuse-rate-limits)" for details. */ - "pulls/request-reviewers": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["pull-request-simple"]; - }; - }; - 403: components["responses"]["forbidden"]; - /** Unprocessable Entity if user is not a collaborator */ - 422: unknown; - }; - requestBody: { - content: { - "application/json": (Partial<{ - [key: string]: any; - }> & Partial<{ - [key: string]: any; - }>) & { - /** An array of user `login`s that will be requested. */ - reviewers?: string[]; - /** An array of team `slug`s that will be requested. 
*/ - team_reviewers?: string[]; - }; - }; - }; - }; - "pulls/remove-requested-reviewers": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-simple"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** An array of user `login`s that will be removed. */ - reviewers: string[]; - /** An array of team `slug`s that will be removed. */ - team_reviewers?: string[]; - }; - }; - }; - }; - /** The list of reviews returns in chronological order. */ - "pulls/list-reviews": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** The list of reviews returns in chronological order. */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["pull-request-review"][]; - }; - }; - }; - }; - /** - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - * - * Pull request reviews created in the `PENDING` state do not include the `submitted_at` property in the response. - * - * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. - * - * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. - */ - "pulls/create-review": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The SHA of the commit that needs a review. Not using the latest commit SHA may render your review comment outdated if a subsequent commit modifies the line you specify as the `position`. 
Defaults to the most recent commit in the pull request when you do not specify a value. */ - commit_id?: string; - /** **Required** when using `REQUEST_CHANGES` or `COMMENT` for the `event` parameter. The body text of the pull request review. */ - body?: string; - /** The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. By leaving this blank, you set the review action state to `PENDING`, which means you will need to [submit the pull request review](https://docs.github.com/rest/reference/pulls#submit-a-review-for-a-pull-request) when you are ready. */ - event?: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; - /** Use the following table to specify the location, destination, and contents of the draft review comment. */ - comments?: { - /** The relative path to the file that necessitates a review comment. */ - path: string; - /** The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note below. */ - position?: number; - /** Text of the review comment. */ - body: string; - line?: number; - side?: string; - start_line?: number; - start_side?: string; - }[]; - }; - }; - }; - }; - "pulls/get-review": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - /** review_id parameter */ - review_id: components["parameters"]["review_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Update the review summary comment with new text. */ - "pulls/update-review": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - /** review_id parameter */ - review_id: components["parameters"]["review_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review"]; - }; - }; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The body text of the pull request review. */ - body: string; - }; - }; - }; - }; - "pulls/delete-pending-review": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - /** review_id parameter */ - review_id: components["parameters"]["review_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed_simple"]; - }; - }; - /** List comments for a specific pull request review. */ - "pulls/list-comments-for-review": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - /** review_id parameter */ - review_id: components["parameters"]["review_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
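A sketch of `pulls/create-review` with one inline draft comment, following the request body documented above; it assumes an authenticated `@octokit/core` client, and the file path is invented.

import { Octokit } from "@octokit/core";

// Sketch: leave a COMMENT review with a single inline remark. Using event: "COMMENT"
// submits the review immediately; omitting `event` would leave it PENDING.
async function leaveReview(octokit: Octokit, owner: string, repo: string, pullNumber: number): Promise<void> {
  await octokit.request("POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews", {
    owner,
    repo,
    pull_number: pullNumber,
    event: "COMMENT",
    body: "A couple of small remarks inline.",
    comments: [
      { path: "src/index.ts", line: 10, side: "RIGHT", body: "Possible typo in this message." },
    ],
  });
}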
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["review-comment"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. */ - "pulls/dismiss-review": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - /** review_id parameter */ - review_id: components["parameters"]["review_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The message for the pull request review dismissal */ - message: string; - event?: string; - }; - }; - }; - }; - "pulls/submit-review": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - /** review_id parameter */ - review_id: components["parameters"]["review_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["pull-request-review"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** The body text of the pull request review */ - body?: string; - /** The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. When you leave this blank, the API returns _HTTP 422 (Unrecognizable entity)_ and sets the review action state to `PENDING`, which means you will need to re-submit the pull request review using a review action. */ - event: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; - }; - }; - }; - }; - /** Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. */ - "pulls/update-branch": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - pull_number: components["parameters"]["pull-number"]; - }; - }; - responses: { - /** Response */ - 202: { - content: { - "application/json": { - message?: string; - url?: string; - }; - }; - }; - 403: components["responses"]["forbidden"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The expected SHA of the pull request's HEAD ref. This is the most recent commit on the pull request's branch. If the expected SHA does not match the pull request's HEAD, you will receive a `422 Unprocessable Entity` status. You can use the "[List commits](https://docs.github.com/rest/reference/repos#list-commits)" endpoint to find the most recent commit SHA. Default: SHA of the pull request's current HEAD ref. */ - expected_head_sha?: string; - } | null; - }; - }; - }; - /** - * Gets the preferred README for a repository. 
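A sketch of `pulls/update-branch` as documented above, passing `expected_head_sha` so a stale view of the branch fails with 422 instead of silently updating; the `lydian` preview media type is included only in case the target API version still expects it (note the 415 response listed above). An authenticated `@octokit/core` client is assumed.

import { Octokit } from "@octokit/core";

// Sketch: merge the latest base-branch changes into the pull request branch.
async function updatePullRequestBranch(
  octokit: Octokit,
  owner: string,
  repo: string,
  pullNumber: number,
  expectedHeadSha: string
) {
  const res = await octokit.request("PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", {
    owner,
    repo,
    pull_number: pullNumber,
    expected_head_sha: expectedHeadSha,
    mediaType: { previews: ["lydian"] }, // may be unnecessary on newer API versions
  });
  return res.status === 202; // GitHub queues the update asynchronously
}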
- * - * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. - */ - "repos/get-readme": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ - ref?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["content-file"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Gets the README from a repository directory. - * - * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. - */ - "repos/get-readme-in-directory": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The alternate path to look for a README file */ - dir: string; - }; - query: { - /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ - ref?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["content-file"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). - * - * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases. - */ - "repos/list-releases": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["release"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Users with push access to the repository can create a release. - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. 
- */ - "repos/create-release": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["release"]; - }; - }; - /** Not Found if the discussion category name is invalid */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the tag. */ - tag_name: string; - /** Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`). */ - target_commitish?: string; - /** The name of the release. */ - name?: string; - /** Text describing the contents of the tag. */ - body?: string; - /** `true` to create a draft (unpublished) release, `false` to create a published one. */ - draft?: boolean; - /** `true` to identify the release as a prerelease. `false` to identify the release as a full release. */ - prerelease?: boolean; - /** If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ - discussion_category_name?: string; - }; - }; - }; - }; - /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. */ - "repos/get-release-asset": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** asset_id parameter */ - asset_id: components["parameters"]["asset_id"]; - }; - }; - responses: { - /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. */ - 200: { - content: { - "application/json": components["schemas"]["release-asset"]; - }; - }; - 302: components["responses"]["found"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - "repos/delete-release-asset": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** asset_id parameter */ - asset_id: components["parameters"]["asset_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Users with push access to the repository can edit a release asset. 
*/ - "repos/update-release-asset": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** asset_id parameter */ - asset_id: components["parameters"]["asset_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["release-asset"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The file name of the asset. */ - name?: string; - /** An alternate short description of the asset. Used in place of the filename. */ - label?: string; - state?: string; - }; - }; - }; - }; - /** - * View the latest published full release for the repository. - * - * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. - */ - "repos/get-latest-release": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["release"]; - }; - }; - }; - }; - /** Get a published release with the specified tag. */ - "repos/get-release-by-tag": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** tag parameter */ - tag: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["release"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ - "repos/get-release": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** release_id parameter */ - release_id: components["parameters"]["release_id"]; - }; - }; - responses: { - /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ - 200: { - content: { - "application/json": components["schemas"]["release"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** Users with push access to the repository can delete a release. */ - "repos/delete-release": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** release_id parameter */ - release_id: components["parameters"]["release_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Users with push access to the repository can edit a release. */ - "repos/update-release": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** release_id parameter */ - release_id: components["parameters"]["release_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["release"]; - }; - }; - /** Not Found if the discussion category name is invalid */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The name of the tag. 
*/ - tag_name?: string; - /** Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`). */ - target_commitish?: string; - /** The name of the release. */ - name?: string; - /** Text describing the contents of the tag. */ - body?: string; - /** `true` makes the release a draft, and `false` publishes the release. */ - draft?: boolean; - /** `true` to identify the release as a prerelease, `false` to identify the release as a full release. */ - prerelease?: boolean; - /** If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. If there is already a discussion linked to the release, this parameter is ignored. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ - discussion_category_name?: string; - }; - }; - }; - }; - "repos/list-release-assets": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** release_id parameter */ - release_id: components["parameters"]["release_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["release-asset"][]; - }; - }; - }; - }; - /** - * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in - * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. - * - * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. - * - * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: - * - * `application/zip` - * - * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, - * you'll still need to pass your authentication to be able to upload an asset. - * - * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. - * - * **Notes:** - * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" - * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact). 
- * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. - */ - "repos/upload-release-asset": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** release_id parameter */ - release_id: components["parameters"]["release_id"]; - }; - query: { - name?: string; - label?: string; - }; - }; - responses: { - /** Response for successful upload */ - 201: { - content: { - "application/json": components["schemas"]["release-asset"]; - }; - }; - }; - requestBody: { - content: { - "*/*": string; - }; - }; - }; - /** - * Lists all secret scanning alerts for a private repository, from newest to oldest. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. - */ - "secret-scanning/list-alerts-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ - state?: "open" | "resolved"; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["secret-scanning-alert"][]; - }; - }; - /** Repository is public or secret scanning is disabled for the repository */ - 404: unknown; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Gets a single secret scanning alert detected in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. - */ - "secret-scanning/get-alert": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ - alert_number: components["parameters"]["alert_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["secret-scanning-alert"]; - }; - }; - /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ - 404: unknown; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Updates the status of a secret scanning alert in a private repository. To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with the `repo` scope or `security_events` scope. - * - * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. - */ - "secret-scanning/update-alert": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - /** The number that identifies an alert. 
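As a worked illustration of the release-asset flow described above — fetch the release, take its hypermedia `upload_url`, and POST the raw binary body with an explicit `Content-Type` — here is a minimal sketch. It assumes `@octokit/core`, a token in `GITHUB_TOKEN`, and placeholder owner/repo/file values; the `GET /repos/{owner}/{repo}/releases/tags/{tag}` route is the conventional path for `repos/get-release-by-tag` and is not spelled out in this file.

```typescript
import { readFile } from "fs/promises";
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function uploadAsset(owner: string, repo: string, tag: string, file: string) {
  // Look up the release to obtain its hypermedia `upload_url`
  // (an RFC 6570 template ending in `{?name,label}`).
  const { data: release } = await octokit.request(
    "GET /repos/{owner}/{repo}/releases/tags/{tag}",
    { owner, repo, tag }
  );
  const uploadUrl = release.upload_url.replace(/\{\?name,label\}$/, "");

  // Send the raw binary content as the body: `Content-Type` declares the
  // asset's media type, and most HTTP clients add `Content-Length` themselves.
  const { data: asset } = await octokit.request({
    method: "POST",
    url: `${uploadUrl}?name=${encodeURIComponent("app.zip")}`,
    headers: { "content-type": "application/zip" },
    data: await readFile(file),
  });

  console.log(`uploaded asset #${asset.id}: ${asset.browser_download_url}`);
}

uploadAsset("octocat", "hello-world", "v1.0.0", "./app.zip").catch(console.error);
```

If the upload fails upstream with a `502`, the leftover asset in the `starter` state can simply be deleted and the upload retried.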
You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ - alert_number: components["parameters"]["alert_number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["secret-scanning-alert"]; - }; - }; - /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ - 404: unknown; - /** State does not match the resolution */ - 422: unknown; - 503: components["responses"]["service_unavailable"]; - }; - requestBody: { - content: { - "application/json": { - state: components["schemas"]["secret-scanning-alert-state"]; - resolution?: components["schemas"]["secret-scanning-alert-resolution"]; - }; - }; - }; - }; - /** - * Lists the people that have starred the repository. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - "activity/list-stargazers-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": Partial & Partial; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ - "repos/get-code-frequency-stats": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ - 200: { - content: { - "application/json": components["schemas"]["code-frequency-stat"][]; - }; - }; - 202: components["responses"]["accepted"]; - 204: components["responses"]["no_content"]; - }; - }; - /** Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. */ - "repos/get-commit-activity-stats": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["commit-activity"][]; - }; - }; - 202: components["responses"]["accepted"]; - 204: components["responses"]["no_content"]; - }; - }; - /** - * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: - * - * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). - * * `a` - Number of additions - * * `d` - Number of deletions - * * `c` - Number of commits - */ - "repos/get-contributors-stats": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** - * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). 
- * * `a` - Number of additions - * * `d` - Number of deletions - * * `c` - Number of commits - */ - 200: { - content: { - "application/json": components["schemas"]["contributor-activity"][]; - }; - }; - 202: components["responses"]["accepted"]; - 204: components["responses"]["no_content"]; - }; - }; - /** - * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. - * - * The array order is oldest week (index 0) to most recent week. - */ - "repos/get-participation-stats": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** The array order is oldest week (index 0) to most recent week. */ - 200: { - content: { - "application/json": components["schemas"]["participation-stats"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Each array contains the day number, hour number, and number of commits: - * - * * `0-6`: Sunday - Saturday - * * `0-23`: Hour of day - * * Number of commits - * - * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. - */ - "repos/get-punch-card-stats": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. */ - 200: { - content: { - "application/json": components["schemas"]["code-frequency-stat"][]; - }; - }; - 204: components["responses"]["no_content"]; - }; - }; - /** - * Users with push access in a repository can create commit statuses for a given SHA. - * - * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. - */ - "repos/create-commit-status": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - sha: string; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["status"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The state of the status. Can be one of `error`, `failure`, `pending`, or `success`. */ - state: "error" | "failure" | "pending" | "success"; - /** - * The target URL to associate with this status. This URL will be linked from the GitHub UI to allow users to easily see the source of the status. - * For example, if your continuous integration system is posting build status, you would want to provide the deep link for the build output for this specific SHA: - * `http://ci.example.com/user/repo/build/sha` - */ - target_url?: string; - /** A short description of the status. */ - description?: string; - /** A string label to differentiate this status from the status of other systems. This field is case-insensitive. */ - context?: string; - }; - }; - }; - }; - /** Lists the people watching the specified repository. 
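To make the `repos/create-commit-status` request body above concrete, here is a short hedged sketch (again assuming `@octokit/core` and placeholder values; `POST /repos/{owner}/{repo}/statuses/{sha}` is the conventional route for this operation and is not taken from this file):

```typescript
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Report a build result for one commit SHA. `context` is the label that
// separates this status from statuses posted by other systems on the same SHA.
octokit
  .request("POST /repos/{owner}/{repo}/statuses/{sha}", {
    owner: "octocat",
    repo: "hello-world",
    sha: "6dcb09b5b57875f334f61aebed695e2e4193db5e",
    state: "success", // error | failure | pending | success
    target_url: "https://ci.example.com/builds/1234", // deep link to the build output
    description: "Build passed",
    context: "ci/example",
  })
  .then(({ data }) => console.log(`created status ${data.id}`))
  .catch(console.error);
```

Remember the documented cap of 1000 statuses per `sha` and `context`; a CI system should reuse one `context` per pipeline rather than minting new ones.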
*/ - "activity/list-watchers-for-repo": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - }; - }; - "activity/get-repo-subscription": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** if you subscribe to the repository */ - 200: { - content: { - "application/json": components["schemas"]["repository-subscription"]; - }; - }; - 403: components["responses"]["forbidden"]; - /** Not Found if you don't subscribe to the repository */ - 404: unknown; - }; - }; - /** If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. */ - "activity/set-repo-subscription": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["repository-subscription"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** Determines if notifications should be received from this repository. */ - subscribed?: boolean; - /** Determines if all notifications should be blocked from this repository. */ - ignored?: boolean; - }; - }; - }; - }; - /** This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). */ - "activity/delete-repo-subscription": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - "repos/list-tags": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["tag"][]; - }; - }; - }; - }; - /** - * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually - * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use - * the `Location` header to make a second `GET` request. - * **Note**: For private repositories, these links are temporary and expire after five minutes. 
- */ - "repos/download-tarball-archive": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - ref: string; - }; - }; - responses: { - /** Response */ - 302: never; - }; - }; - "repos/list-teams": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - }; - }; - "repos/get-all-topics": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["topic"]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - "repos/replace-all-topics": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["topic"]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** An array of topics to add to the repository. Pass one or more topics to _replace_ the set of existing topics. Send an empty array (`[]`) to clear all topics from the repository. **Note:** Topic `names` cannot contain uppercase letters. */ - names: string[]; - }; - }; - }; - }; - /** Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ - "repos/get-clones": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Must be one of: `day`, `week`. */ - per?: components["parameters"]["per"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["clone-traffic"]; - }; - }; - 403: components["responses"]["forbidden"]; - }; - }; - /** Get the top 10 popular contents over the last 14 days. */ - "repos/get-top-paths": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["content-traffic"][]; - }; - }; - 403: components["responses"]["forbidden"]; - }; - }; - /** Get the top 10 referrers over the last 14 days. 
*/ - "repos/get-top-referrers": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["referrer-traffic"][]; - }; - }; - 403: components["responses"]["forbidden"]; - }; - }; - /** Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ - "repos/get-views": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - query: { - /** Must be one of: `day`, `week`. */ - per?: components["parameters"]["per"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["view-traffic"]; - }; - }; - 403: components["responses"]["forbidden"]; - }; - }; - /** A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://help.github.com/articles/about-repository-transfers/). */ - "repos/transfer": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 202: { - content: { - "application/json": components["schemas"]["minimal-repository"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The username or organization name the repository will be transferred to. */ - new_owner: string; - /** ID of the team or teams to add to the repository. Teams can only be added to organization-owned repositories. */ - team_ids?: number[]; - }; - }; - }; - }; - /** Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ - "repos/check-vulnerability-alerts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response if repository is enabled with vulnerability alerts */ - 204: never; - /** Not Found if repository is not enabled with vulnerability alerts */ - 404: unknown; - }; - }; - /** Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ - "repos/enable-vulnerability-alerts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://help.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". 
*/ - "repos/disable-vulnerability-alerts": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually - * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use - * the `Location` header to make a second `GET` request. - * **Note**: For private repositories, these links are temporary and expire after five minutes. - */ - "repos/download-zipball-archive": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - ref: string; - }; - }; - responses: { - /** Response */ - 302: never; - }; - }; - /** - * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. The authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository - */ - "repos/create-using-template": { - parameters: { - path: { - template_owner: string; - template_repo: string; - }; - }; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["repository"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The organization or person who will own the new repository. To create a new repository in an organization, the authenticated user must be a member of the specified organization. */ - owner?: string; - /** The name of the new repository. */ - name: string; - /** A short description of the new repository. */ - description?: string; - /** Set to `true` to include the directory structure and files from all branches in the template repository, and not just the default branch. Default: `false`. */ - include_all_branches?: boolean; - /** Either `true` to create a new private repository or `false` to create a new public one. */ - private?: boolean; - }; - }; - }; - }; - /** - * Lists all public repositories in the order that they were created. - * - * Notes: - * - For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. - * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. - */ - "repos/list-public": { - parameters: { - query: { - /** A repository ID. Only return repositories with an ID greater than this ID. 
*/ - since?: components["parameters"]["since-repo"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: { - Link?: string; - }; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - "actions/list-environment-secrets": { - parameters: { - path: { - repository_id: components["parameters"]["repository_id"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - secrets: components["schemas"]["actions-secret"][]; - }; - }; - }; - }; - }; - /** Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - "actions/get-environment-public-key": { - parameters: { - path: { - repository_id: components["parameters"]["repository_id"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-public-key"]; - }; - }; - }; - }; - /** Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ - "actions/get-environment-secret": { - parameters: { - path: { - repository_id: components["parameters"]["repository_id"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-secret"]; - }; - }; - }; - }; - /** - * Creates or updates an environment secret with an encrypted value. Encrypt your secret using - * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access - * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use - * this endpoint. - * - * #### Example encrypting a secret using Node.js - * - * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. 
- * - * ``` - * const sodium = require('tweetsodium'); - * - * const key = "base64-encoded-public-key"; - * const value = "plain-text-secret"; - * - * // Convert the message and key to Uint8Array's (Buffer implements that interface) - * const messageBytes = Buffer.from(value); - * const keyBytes = Buffer.from(key, 'base64'); - * - * // Encrypt using LibSodium. - * const encryptedBytes = sodium.seal(messageBytes, keyBytes); - * - * // Base64 the encrypted secret - * const encrypted = Buffer.from(encryptedBytes).toString('base64'); - * - * console.log(encrypted); - * ``` - * - * - * #### Example encrypting a secret using Python - * - * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/stable/public/#nacl-public-sealedbox) with Python 3. - * - * ``` - * from base64 import b64encode - * from nacl import encoding, public - * - * def encrypt(public_key: str, secret_value: str) -> str: - * """Encrypt a Unicode string using the public key.""" - * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) - * sealed_box = public.SealedBox(public_key) - * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) - * return b64encode(encrypted).decode("utf-8") - * ``` - * - * #### Example encrypting a secret using C# - * - * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. - * - * ``` - * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); - * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); - * - * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); - * - * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); - * ``` - * - * #### Example encrypting a secret using Ruby - * - * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. - * - * ```ruby - * require "rbnacl" - * require "base64" - * - * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") - * public_key = RbNaCl::PublicKey.new(key) - * - * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) - * encrypted_secret = box.encrypt("my_secret") - * - * # Print the base64 encoded secret - * puts Base64.strict_encode64(encrypted_secret) - * ``` - */ - "actions/create-or-update-environment-secret": { - parameters: { - path: { - repository_id: components["parameters"]["repository_id"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Response when creating a secret */ - 201: unknown; - /** Response when updating a secret */ - 204: never; - }; - requestBody: { - content: { - "application/json": { - /** Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an environment public key](https://docs.github.com/rest/reference/actions#get-an-environment-public-key) endpoint. */ - encrypted_value?: string; - /** ID of the key you used to encrypt the secret. */ - key_id?: string; - }; - }; - }; - }; - /** Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. 
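Tying `actions/create-or-update-environment-secret` together end to end — fetch the environment public key, seal the value, then send `encrypted_value` plus `key_id` — here is a hedged TypeScript sketch that reuses the tweetsodium call from the Node.js example above. `@octokit/core`, the token variable and the repository-ID based routes are assumptions rather than something spelled out in this file:

```typescript
import { Octokit } from "@octokit/core";

// tweetsodium ships without bundled type declarations, so a plain require
// (typed inline) keeps the sketch simple.
const sodium: { seal(message: Uint8Array, publicKey: Uint8Array): Uint8Array } =
  require("tweetsodium");

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function putEnvironmentSecret(
  repositoryId: number,
  environmentName: string,
  secretName: string,
  plaintext: string
) {
  // 1. The environment's public key is required for encryption.
  const { data: key } = await octokit.request(
    "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key",
    { repository_id: repositoryId, environment_name: environmentName }
  );

  // 2. Seal the secret with LibSodium, exactly as in the example above.
  const encryptedBytes = sodium.seal(
    Buffer.from(plaintext),
    Buffer.from(key.key, "base64")
  );

  // 3. Create or update the secret: 201 on create, 204 on update.
  await octokit.request(
    "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
    {
      repository_id: repositoryId,
      environment_name: environmentName,
      secret_name: secretName,
      encrypted_value: Buffer.from(encryptedBytes).toString("base64"),
      key_id: key.key_id,
    }
  );
}

putEnvironmentSecret(1296269, "production", "MY_SECRET", "plain-text-secret").catch(console.error);
```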
*/ - "actions/delete-environment-secret": { - parameters: { - path: { - repository_id: components["parameters"]["repository_id"]; - /** The name of the environment */ - environment_name: components["parameters"]["environment_name"]; - /** secret_name parameter */ - secret_name: components["parameters"]["secret_name"]; - }; - }; - responses: { - /** Default response */ - 204: never; - }; - }; - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - "enterprise-admin/list-provisioned-groups-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - query: { - /** Used for pagination: the index of the first result to return. */ - startIndex?: components["parameters"]["start_index"]; - /** Used for pagination: the number of results to return. */ - count?: components["parameters"]["count"]; - /** filter results */ - filter?: string; - /** attributes to exclude */ - excludedAttributes?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-group-list-enterprise"]; - }; - }; - }; - }; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Provision an enterprise group, and invite users to the group. This sends invitation emails to the email address of the invited users to join the GitHub organization that the SCIM group corresponds to. - */ - "enterprise-admin/provision-and-invite-enterprise-group": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["scim-enterprise-group"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The SCIM schema URIs. */ - schemas: string[]; - /** The name of the SCIM group. This must match the GitHub organization that the group maps to. */ - displayName: string; - members?: { - /** The SCIM user ID for a user. */ - value: string; - }[]; - }; - }; - }; - }; - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - "enterprise-admin/get-provisioning-information-for-enterprise-group": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Identifier generated by the GitHub SCIM endpoint. */ - scim_group_id: components["parameters"]["scim_group_id"]; - }; - query: { - /** Attributes to exclude. */ - excludedAttributes?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-enterprise-group"]; - }; - }; - }; - }; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Replaces an existing provisioned group’s information. You must provide all the information required for the group as if you were provisioning it for the first time. Any existing group information that you don't provide will be removed, including group membership. 
If you want to only update a specific attribute, use the [Update an attribute for a SCIM enterprise group](#update-an-attribute-for-a-scim-enterprise-group) endpoint instead. - */ - "enterprise-admin/set-information-for-provisioned-enterprise-group": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Identifier generated by the GitHub SCIM endpoint. */ - scim_group_id: components["parameters"]["scim_group_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-enterprise-group"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The SCIM schema URIs. */ - schemas: string[]; - /** The name of the SCIM group. This must match the GitHub organization that the group maps to. */ - displayName: string; - members?: { - /** The SCIM user ID for a user. */ - value: string; - }[]; - }; - }; - }; - }; - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - "enterprise-admin/delete-scim-group-from-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Identifier generated by the GitHub SCIM endpoint. */ - scim_group_id: components["parameters"]["scim_group_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Allows you to change a provisioned group’s individual attributes. To change a group’s values, you must provide a specific Operations JSON format that contains at least one of the add, remove, or replace operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). - */ - "enterprise-admin/update-attribute-for-enterprise-group": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** Identifier generated by the GitHub SCIM endpoint. */ - scim_group_id: components["parameters"]["scim_group_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-enterprise-group"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The SCIM schema URIs. */ - schemas: string[]; - /** Array of [SCIM operations](https://tools.ietf.org/html/rfc7644#section-3.5.2). */ - Operations: { - op: "add" | "Add" | "remove" | "Remove" | "replace" | "Replace"; - path?: string; - value?: string | { - [key: string]: any; - } | any[]; - }[]; - }; - }; - }; - }; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Retrieves a paginated list of all provisioned enterprise members, including pending invitations. - * - * When a user with a SAML-provisioned external identity leaves (or is removed from) an enterprise, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. 
This can happen in certain cases where an external identity associated with an organization will not match an organization member: - * - When a user with a SCIM-provisioned external identity is removed from an enterprise, the account's metadata is preserved to allow the user to re-join the organization in the future. - * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). - * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. - * - * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: - * - * 1. The user is granted access by the IdP and is not a member of the GitHub enterprise. - * - * 1. The user attempts to access the GitHub enterprise and initiates the SAML SSO process, and is not currently signed in to their GitHub account. - * - * 1. After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: - * - If the user signs in, their GitHub account is linked to this entry. - * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub enterprise, and the external identity `null` entry remains in place. - */ - "enterprise-admin/list-provisioned-identities-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - query: { - /** Used for pagination: the index of the first result to return. */ - startIndex?: components["parameters"]["start_index"]; - /** Used for pagination: the number of results to return. */ - count?: components["parameters"]["count"]; - /** filter results */ - filter?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-user-list-enterprise"]; - }; - }; - }; - }; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Provision enterprise membership for a user, and send organization invitation emails to the email address. - * - * You can optionally include the groups a user will be invited to join. If you do not provide a list of `groups`, the user is provisioned for the enterprise, but no organization invitation emails will be sent. - */ - "enterprise-admin/provision-and-invite-enterprise-user": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["scim-enterprise-user"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The SCIM schema URIs. */ - schemas: string[]; - /** The username for the user. */ - userName: string; - name: { - /** The first name of the user. */ - givenName: string; - /** The last name of the user. 
*/ - familyName: string; - }; - /** List of user emails. */ - emails: { - /** The email address. */ - value: string; - /** The type of email address. */ - type: string; - /** Whether this email address is the primary address. */ - primary: boolean; - }[]; - /** List of SCIM group IDs the user is a member of. */ - groups?: { - value?: string; - }[]; - }; - }; - }; - }; - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - "enterprise-admin/get-provisioning-information-for-enterprise-user": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-enterprise-user"]; - }; - }; - }; - }; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. - * - * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](#update-an-attribute-for-an-enterprise-scim-user) endpoint instead. - * - * You must at least provide the required values for the user: `userName`, `name`, and `emails`. - * - * **Warning:** Setting `active: false` removes the user from the enterprise, deletes the external identity, and deletes the associated `{scim_user_id}`. - */ - "enterprise-admin/set-information-for-provisioned-enterprise-user": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-enterprise-user"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The SCIM schema URIs. */ - schemas: string[]; - /** The username for the user. */ - userName: string; - name: { - /** The first name of the user. */ - givenName: string; - /** The last name of the user. */ - familyName: string; - }; - /** List of user emails. */ - emails: { - /** The email address. */ - value: string; - /** The type of email address. */ - type: string; - /** Whether this email address is the primary address. */ - primary: boolean; - }[]; - /** List of SCIM group IDs the user is a member of. */ - groups?: { - value?: string; - }[]; - }; - }; - }; - }; - /** **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. */ - "enterprise-admin/delete-user-from-enterprise": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * **Note:** The SCIM API endpoints for enterprise accounts are currently in beta and are subject to change. 
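Because `enterprise-admin/set-information-for-provisioned-enterprise-user` (defined above) is a full replacement, every required attribute has to be resent or it is dropped. A hedged sketch of such a call, assuming `@octokit/core` and the conventional `/scim/v2/enterprises/{enterprise}/Users/{scim_user_id}` path; all identifiers below are placeholders:

```typescript
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Replace a provisioned enterprise user. `schemas`, `userName`, `name` and
// `emails` are all required again on every call, because omitted attributes
// are removed; setting `active: false` here would deprovision the user.
octokit
  .request("PUT /scim/v2/enterprises/{enterprise}/Users/{scim_user_id}", {
    enterprise: "octo-enterprise",
    scim_user_id: "7fce0092-d52e-4f76-b727-3955bd72c939",
    schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
    userName: "mona.octocat@example.com",
    name: { givenName: "Mona", familyName: "Octocat" },
    emails: [{ value: "mona.octocat@example.com", type: "work", primary: true }],
  })
  .then(({ data }) => console.log(`replaced SCIM user ${data.id}`))
  .catch(console.error);
```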
- * - * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). - * - * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. - * - * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the enterprise, deletes the external identity, and deletes the associated `:scim_user_id`. - * - * ``` - * { - * "Operations":[{ - * "op":"replace", - * "value":{ - * "active":false - * } - * }] - * } - * ``` - */ - "enterprise-admin/update-attribute-for-enterprise-user": { - parameters: { - path: { - /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ - enterprise: components["parameters"]["enterprise"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["scim-enterprise-user"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The SCIM schema URIs. */ - schemas: string[]; - /** Array of [SCIM operations](https://tools.ietf.org/html/rfc7644#section-3.5.2). */ - Operations: { - [key: string]: any; - }[]; - }; - }; - }; - }; - /** - * Retrieves a paginated list of all provisioned organization members, including pending invitations. If you provide the `filter` parameter, the resources for all matching provisions members are returned. - * - * When a user with a SAML-provisioned external identity leaves (or is removed from) an organization, the account's metadata is immediately removed. However, the returned list of user accounts might not always match the organization or enterprise member list you see on GitHub. This can happen in certain cases where an external identity associated with an organization will not match an organization member: - * - When a user with a SCIM-provisioned external identity is removed from an organization, the account's metadata is preserved to allow the user to re-join the organization in the future. - * - When inviting a user to join an organization, you can expect to see their external identity in the results before they accept the invitation, or if the invitation is cancelled (or never accepted). - * - When a user is invited over SCIM, an external identity is created that matches with the invitee's email address. However, this identity is only linked to a user account when the user accepts the invitation by going through SAML SSO. - * - * The returned list of external identities can include an entry for a `null` user. These are unlinked SAML identities that are created when a user goes through the following Single Sign-On (SSO) process but does not sign in to their GitHub account after completing SSO: - * - * 1. The user is granted access by the IdP and is not a member of the GitHub organization. - * - * 1. The user attempts to access the GitHub organization and initiates the SAML SSO process, and is not currently signed in to their GitHub account. - * - * 1. 
After successfully authenticating with the SAML SSO IdP, the `null` external identity entry is created and the user is prompted to sign in to their GitHub account: - * - If the user signs in, their GitHub account is linked to this entry. - * - If the user does not sign in (or does not create a new account when prompted), they are not added to the GitHub organization, and the external identity `null` entry remains in place. - */ - "scim/list-provisioned-identities": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - query: { - /** Used for pagination: the index of the first result to return. */ - startIndex?: number; - /** Used for pagination: the number of results to return. */ - count?: number; - /** - * Filters results using the equals query parameter operator (`eq`). You can filter results that are equal to `id`, `userName`, `emails`, and `external_id`. For example, to search for an identity with the `userName` Octocat, you would use this query: - * - * `?filter=userName%20eq%20\"Octocat\"`. - * - * To filter results for the identity with the email `octocat@github.com`, you would use this query: - * - * `?filter=emails%20eq%20\"octocat@github.com\"`. - */ - filter?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/scim+json": components["schemas"]["scim-user-list"]; - }; - }; - 304: components["responses"]["not_modified"]; - 400: components["responses"]["scim_bad_request"]; - 403: components["responses"]["scim_forbidden"]; - 404: components["responses"]["scim_not_found"]; - }; - }; - /** Provision organization membership for a user, and send an activation email to the email address. */ - "scim/provision-and-invite-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/scim+json": components["schemas"]["scim-user"]; - }; - }; - 304: components["responses"]["not_modified"]; - 400: components["responses"]["scim_bad_request"]; - 403: components["responses"]["scim_forbidden"]; - 404: components["responses"]["scim_not_found"]; - 409: components["responses"]["scim_conflict"]; - 500: components["responses"]["scim_internal_error"]; - }; - requestBody: { - content: { - "application/json": { - /** Configured by the admin. Could be an email, login, or username */ - userName: string; - /** The name of the user, suitable for display to end-users */ - displayName?: string; - name: { - givenName: string; - familyName: string; - formatted?: string; - }; - /** user emails */ - emails: { - value: string; - primary?: boolean; - type?: string; - }[]; - schemas?: string[]; - externalId?: string; - groups?: string[]; - active?: boolean; - }; - }; - }; - }; - "scim/get-provisioning-information-for-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/scim+json": components["schemas"]["scim-user"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["scim_forbidden"]; - 404: components["responses"]["scim_not_found"]; - }; - }; - /** - * Replaces an existing provisioned user's information. You must provide all the information required for the user as if you were provisioning them for the first time. Any existing user information that you don't provide will be removed. 
If you want to only update a specific attribute, use the [Update an attribute for a SCIM user](https://docs.github.com/rest/reference/scim#update-an-attribute-for-a-scim-user) endpoint instead. - * - * You must at least provide the required values for the user: `userName`, `name`, and `emails`. - * - * **Warning:** Setting `active: false` removes the user from the organization, deletes the external identity, and deletes the associated `{scim_user_id}`. - */ - "scim/set-information-for-provisioned-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/scim+json": components["schemas"]["scim-user"]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["scim_forbidden"]; - 404: components["responses"]["scim_not_found"]; - }; - requestBody: { - content: { - "application/json": { - schemas?: string[]; - /** The name of the user, suitable for display to end-users */ - displayName?: string; - externalId?: string; - groups?: string[]; - active?: boolean; - /** Configured by the admin. Could be an email, login, or username */ - userName: string; - name: { - givenName: string; - familyName: string; - formatted?: string; - }; - /** user emails */ - emails: { - type?: string; - value: string; - primary?: boolean; - }[]; - }; - }; - }; - }; - "scim/delete-user-from-org": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["scim_forbidden"]; - 404: components["responses"]["scim_not_found"]; - }; - }; - /** - * Allows you to change a provisioned user's individual attributes. To change a user's values, you must provide a specific `Operations` JSON format that contains at least one of the `add`, `remove`, or `replace` operations. For examples and more information on the SCIM operations format, see the [SCIM specification](https://tools.ietf.org/html/rfc7644#section-3.5.2). - * - * **Note:** Complicated SCIM `path` selectors that include filters are not supported. For example, a `path` selector defined as `"path": "emails[type eq \"work\"]"` will not work. - * - * **Warning:** If you set `active:false` using the `replace` operation (as shown in the JSON example below), it removes the user from the organization, deletes the external identity, and deletes the associated `:scim_user_id`. 
- * - * ``` - * { - * "Operations":[{ - * "op":"replace", - * "value":{ - * "active":false - * } - * }] - * } - * ``` - */ - "scim/update-attribute-for-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - /** scim_user_id parameter */ - scim_user_id: components["parameters"]["scim_user_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/scim+json": components["schemas"]["scim-user"]; - }; - }; - 304: components["responses"]["not_modified"]; - 400: components["responses"]["scim_bad_request"]; - 403: components["responses"]["scim_forbidden"]; - 404: components["responses"]["scim_not_found"]; - /** Response */ - 429: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - schemas?: string[]; - /** Set of operations to be performed */ - Operations: { - op: "add" | "remove" | "replace"; - path?: string; - value?: { - active?: boolean | null; - userName?: string | null; - externalId?: string | null; - givenName?: string | null; - familyName?: string | null; - } | { - value?: string; - primary?: boolean; - }[] | string; - }[]; - }; - }; - }; - }; - /** - * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: - * - * `q=addClass+in:file+language:js+repo:jquery/jquery` - * - * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. - * - * #### Considerations for code search - * - * Due to the complexity of searching code, there are a few restrictions on how searches are performed: - * - * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. - * * Only files smaller than 384 KB are searchable. - * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing - * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. - */ - "search/code": { - parameters: { - query: { - /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching code](https://help.github.com/articles/searching-code/)" for a detailed list of qualifiers. */ - q: string; - /** Sorts the results of your query. Can only be `indexed`, which indicates how recently a file has been indexed by the GitHub search infrastructure. 
Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ - sort?: "indexed"; - /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ - order?: components["parameters"]["order"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - incomplete_results: boolean; - items: components["schemas"]["code-search-result-item"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match - * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: - * - * `q=repo:octocat/Spoon-Knife+css` - */ - "search/commits": { - parameters: { - query: { - /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching commits](https://help.github.com/articles/searching-commits/)" for a detailed list of qualifiers. */ - q: string; - /** Sorts the results of your query by `author-date` or `committer-date`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ - sort?: "author-date" | "committer-date"; - /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ - order?: components["parameters"]["order"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - incomplete_results: boolean; - items: components["schemas"]["commit-search-result-item"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). 
- * - * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted - * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. - * - * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` - * - * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. - * - * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." - */ - "search/issues-and-pull-requests": { - parameters: { - query: { - /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching issues and pull requests](https://help.github.com/articles/searching-issues-and-pull-requests/)" for a detailed list of qualifiers. */ - q: string; - /** Sorts the results of your query by the number of `comments`, `reactions`, `reactions-+1`, `reactions--1`, `reactions-smile`, `reactions-thinking_face`, `reactions-heart`, `reactions-tada`, or `interactions`. You can also sort results by how recently the items were `created` or `updated`, Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ - sort?: "comments" | "reactions" | "reactions-+1" | "reactions--1" | "reactions-smile" | "reactions-thinking_face" | "reactions-heart" | "reactions-tada" | "interactions" | "created" | "updated"; - /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ - order?: components["parameters"]["order"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - incomplete_results: boolean; - items: components["schemas"]["issue-search-result-item"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: - * - * `q=bug+defect+enhancement&repository_id=64778136` - * - * The labels that best match the query appear first in the search results. - */ - "search/labels": { - parameters: { - query: { - /** The id of the repository. */ - repository_id: number; - /** The search keywords. This endpoint does not accept qualifiers in the query. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). */ - q: string; - /** Sorts the results of your query by when the label was `created` or `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ - sort?: "created" | "updated"; - /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ - order?: components["parameters"]["order"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - incomplete_results: boolean; - items: components["schemas"]["label-search-result-item"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: - * - * `q=tetris+language:assembly&sort=stars&order=desc` - * - * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. 
- * - * When you include the `mercy` preview header, you can also search for multiple topics by adding more `topic:` instances. For example, your query might look like this: - * - * `q=topic:ruby+topic:rails` - */ - "search/repos": { - parameters: { - query: { - /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching for repositories](https://help.github.com/articles/searching-for-repositories/)" for a detailed list of qualifiers. */ - q: string; - /** Sorts the results of your query by number of `stars`, `forks`, or `help-wanted-issues` or how recently the items were `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ - sort?: "stars" | "forks" | "help-wanted-issues" | "updated"; - /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ - order?: components["parameters"]["order"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - incomplete_results: boolean; - items: components["schemas"]["repo-search-result-item"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 422: components["responses"]["validation_failed"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** - * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://help.github.com/articles/searching-topics/)" for a detailed list of qualifiers. - * - * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this: - * - * `q=ruby+is:featured` - * - * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. - */ - "search/topics": { - parameters: { - query: { - /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). 
*/ - q: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - incomplete_results: boolean; - items: components["schemas"]["topic-search-result-item"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). - * - * When searching for users, you can get text match metadata for the user **login**, **email**, and **name** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). - * - * For example, if you're looking for a list of popular users, you might try this query: - * - * `q=tom+repos:%3E42+followers:%3E1000` - * - * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. - */ - "search/users": { - parameters: { - query: { - /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as GitHub.com. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching users](https://help.github.com/articles/searching-users/)" for a detailed list of qualifiers. */ - q: string; - /** Sorts the results of your query by number of `followers` or `repositories`, or when the person `joined` GitHub. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ - sort?: "followers" | "repositories" | "joined"; - /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ - order?: components["parameters"]["order"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": { - total_count: number; - incomplete_results: boolean; - items: components["schemas"]["user-search-result-item"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 422: components["responses"]["validation_failed"]; - 503: components["responses"]["service_unavailable"]; - }; - }; - /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint. */ - "teams/get-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-full"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint. - * - * To delete a team, the authenticated user must be an organization owner or team maintainer. - * - * If you are an organization owner, deleting a parent team will delete all of its child teams as well. - */ - "teams/delete-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint. - * - * To edit a team, the authenticated user must either be an organization owner or a team maintainer. - * - * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. - */ - "teams/update-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-full"]; - }; - }; - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["team-full"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the team. */ - name: string; - /** The description of the team. */ - description?: string; - /** - * The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. The options are: - * **For a non-nested team:** - * \* `secret` - only visible to organization owners and members of this team. - * \* `closed` - visible to all members of this organization. - * **For a parent or child team:** - * \* `closed` - visible to all members of this organization. - */ - privacy?: "secret" | "closed"; - /** - * **Deprecated**. The permission that new repositories will be added to the team with when none is specified. Can be one of: - * \* `pull` - team members can pull, but not push to or administer newly-added repositories. - * \* `push` - team members can pull and push, but not administer newly-added repositories. - * \* `admin` - team members can pull, push and administer newly-added repositories. - */ - permission?: "pull" | "push" | "admin"; - /** The ID of a team to set as the parent team. */ - parent_team_id?: number | null; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. - * - * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/list-discussions-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - query: { - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Results per page (max 100). 
*/ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team-discussion"][]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. - * - * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - "teams/create-discussion-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["team-discussion"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion post's title. */ - title: string; - /** The discussion post's body text. */ - body: string; - /** Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post. */ - private?: boolean; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. - * - * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/get-discussion-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion"]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. - * - * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/delete-discussion-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. - * - * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/update-discussion-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion post's title. */ - title?: string; - /** The discussion post's body text. */ - body?: string; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. - * - * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/list-discussion-comments-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - query: { - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team-discussion-comment"][]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. - * - * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - * - * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in abuse rate limiting. See "[Abuse rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#abuse-rate-limits)" and "[Dealing with abuse rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. - */ - "teams/create-discussion-comment-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["team-discussion-comment"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion comment's body text. 
*/ - body: string; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. - * - * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/get-discussion-comment-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion-comment"]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. - * - * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/delete-discussion-comment-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. - * - * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "teams/update-discussion-comment-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-discussion-comment"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The discussion comment's body text. */ - body: string; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. - * - * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
- */ - "reactions/list-for-team-discussion-comment-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. - * - * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion comment. - */ - "reactions/create-for-team-discussion-comment-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - comment_number: components["parameters"]["comment-number"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. - * - * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). - */ - "reactions/list-for-team-discussion-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - query: { - /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ - content?: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["reaction"][]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. - * - * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with a `Status: 200 OK` means that you already added the reaction type to this team discussion. - */ - "reactions/create-for-team-discussion-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - discussion_number: components["parameters"]["discussion-number"]; - }; - }; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["reaction"]; - }; - }; - }; - requestBody: { - content: { - "application/json": { - /** The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. */ - content: "+1" | "-1" | "laugh" | "confused" | "heart" | "hooray" | "rocket" | "eyes"; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. - * - * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. - */ - "teams/list-pending-invitations-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["organization-invitation"][]; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. - * - * Team members will include the members of child teams. - */ - "teams/list-members-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - query: { - /** - * Filters members returned by their role in the team. Can be one of: - * \* `member` - normal members of the team. - * \* `maintainer` - team maintainers. - * \* `all` - all members of the team. - */ - role?: "member" | "maintainer" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * The "Get team member" endpoint (described below) is deprecated. - * - * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships. - * - * To list members in a team, the team must be visible to the authenticated user. - */ - "teams/get-member-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** if user is a member */ - 204: never; - /** if user is not a member */ - 404: unknown; - }; - }; - /** - * The "Add team member" endpoint (described below) is deprecated. - * - * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - "teams/add-member-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - /** Not Found if team synchronization is set up */ - 404: unknown; - /** Unprocessable Entity if you attempt to add an organization to a team or you attempt to add a user to a team when they are not a member of at least one other team in the same organization */ - 422: unknown; - }; - }; - /** - * The "Remove team member" endpoint (described below) is deprecated. - * - * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. 
For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - */ - "teams/remove-member-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** Not Found if team synchronization is setup */ - 404: unknown; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint. - * - * Team members will include the members of child teams. - * - * To get a user's membership with a team, the team must be visible to the authenticated user. - * - * **Note:** - * The response contains the `state` of the membership and the member's `role`. - * - * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). - */ - "teams/get-membership-for-user-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-membership"]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. 
If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." - * - * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner. - * - * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. - */ - "teams/add-or-update-membership-for-user-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-membership"]; - }; - }; - /** Forbidden if team synchronization is set up */ - 403: unknown; - 404: components["responses"]["not_found"]; - /** Unprocessable Entity if you attempt to add an organization to a team */ - 422: unknown; - }; - requestBody: { - content: { - "application/json": { - /** - * The role that this user should have in the team. Can be one of: - * \* `member` - a normal member of the team. - * \* `maintainer` - a team maintainer. Able to add/remove other team members, promote other team members to team maintainer, and edit the team's name and description. - */ - role?: "member" | "maintainer"; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. - * - * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://help.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." 
- */ - "teams/remove-membership-for-user-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** if team synchronization is set up */ - 403: unknown; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. - * - * Lists the organization projects for a team. - */ - "teams/list-projects-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team-project"][]; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. - * - * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. - */ - "teams/check-permissions-for-project-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["team-project"]; - }; - }; - /** Not Found if project is not managed by this team */ - 404: unknown; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. - * - * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. - */ - "teams/add-or-update-project-permissions-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - /** Forbidden if the project is not owned by the organization */ - 403: { - content: { - "application/json": { - message?: string; - documentation_url?: string; - }; - }; - }; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** - * The permission to grant to the team for this project. Can be one of: - * \* `read` - team members can read, but not write to or administer this project. - * \* `write` - team members can read and write, but not administer this project. 
- * \* `admin` - team members can read, write and administer this project. - * Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - permission?: "read" | "write" | "admin"; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. - * - * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it. - */ - "teams/remove-project-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - project_id: components["parameters"]["project-id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. */ - "teams/list-repos-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Note**: Repositories inherited through a parent team will also be checked. - * - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. 
- * - * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - "teams/check-permissions-for-repo-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Alternative response with extra repository information */ - 200: { - content: { - "application/json": components["schemas"]["team-repository"]; - }; - }; - /** Response if repository is managed by this team */ - 204: never; - /** Not Found if repository is not managed by this team */ - 404: unknown; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. - * - * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. - * - * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - */ - "teams/add-or-update-repo-permissions-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** - * The permission to grant the team on this repository. Can be one of: - * \* `pull` - team members can pull, but not push to or administer this repository. - * \* `push` - team members can pull and push, but not administer this repository. - * \* `admin` - team members can pull, push and administer this repository. - * - * If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. - */ - permission?: "pull" | "push" | "admin"; - }; - }; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. - * - * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team. 
- */ - "teams/remove-repo-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List IdP groups for a team`](https://docs.github.com/rest/reference/teams#list-idp-groups-for-a-team) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * List IdP groups connected to a team on GitHub. - */ - "teams/list-idp-groups-for-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["group-mapping"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create or update IdP group connections`](https://docs.github.com/rest/reference/teams#create-or-update-idp-group-connections) endpoint. - * - * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://help.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. - * - * Creates, updates, or removes a connection between a team and an IdP group. When adding groups to a team, you must include all new and existing groups to avoid replacing existing groups with the new ones. Specifying an empty `groups` array will remove all connections for a team. - */ - "teams/create-or-update-idp-group-connections-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["group-mapping"]; - }; - }; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The IdP groups you want to connect to a GitHub team. When updating, the new `groups` object will replace the original one. You must include any existing groups that you don't want to remove. */ - groups: { - /** ID of the IdP group. */ - group_id: string; - /** Name of the IdP group. */ - group_name: string; - /** Description of the IdP group. */ - group_description: string; - id?: string; - name?: string; - description?: string; - }[]; - synced_at?: string; - }; - }; - }; - }; - /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. */ - "teams/list-child-legacy": { - parameters: { - path: { - team_id: components["parameters"]["team-id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** if child teams exist */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team"][]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. - * - * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. - */ - "users/get-authenticated": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["private-user"] | components["schemas"]["public-user"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. */ - "users/update-authenticated": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["private-user"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The new name of the user. */ - name?: string; - /** The publicly visible email address of the user. */ - email?: string; - /** The new blog URL of the user. */ - blog?: string; - /** The new Twitter username of the user. */ - twitter_username?: string | null; - /** The new company of the user. */ - company?: string; - /** The new location of the user. */ - location?: string; - /** The new hiring availability of the user. */ - hireable?: boolean; - /** The new short biography of the user. */ - bio?: string; - }; - }; - }; - }; - /** List the users you've blocked on your personal account. 
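To illustrate the `users/get-authenticated` and `users/update-authenticated` shapes above, a small sketch under the same assumptions (`@actions/github` plus a `GITHUB_TOKEN` carrying the `user` scope; the profile value is a placeholder):

```ts
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

async function showAndUpdateProfile(): Promise<void> {
  // With the `user` scope the response may include private profile fields.
  const { data: me } = await octokit.request("GET /user");
  console.log(`Logged in as ${me.login}`);

  // PATCH /user accepts any subset of the request-body fields listed above.
  await octokit.request("PATCH /user", {
    bio: "Automation account for release builds", // placeholder value
  });
}

showAndUpdateProfile().catch((error) => console.error(error));
```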
*/ - "users/list-blocked-by-authenticated": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - "users/check-blocked": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** If the user is blocked: */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - /** If the user is not blocked: */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - }; - "users/block": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "users/unblock": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Sets the visibility for your primary email addresses. */ - "users/set-primary-email-visibility-for-authenticated": { - parameters: {}; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["email"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** An email address associated with the GitHub user account to manage. */ - email?: string; - /** Denotes whether an email is publically visible. */ - visibility: "public" | "private"; - }; - }; - }; - }; - /** Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. */ - "users/list-emails-for-authenticated": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["email"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** This endpoint is accessible with the `user` scope. 
*/ - "users/add-email-for-authenticated": { - parameters: {}; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["email"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Adds one or more email addresses to your GitHub account. Must contain at least one email address. **Note:** Alternatively, you can pass a single email address or an `array` of emails addresses directly, but we recommend that you pass an object using the `emails` key. */ - emails: string[]; - }; - }; - }; - }; - /** This endpoint is accessible with the `user` scope. */ - "users/delete-email-for-authenticated": { - parameters: {}; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Email addresses associated with the GitHub user account. */ - emails: string[]; - }; - }; - }; - }; - /** Lists the people following the authenticated user. */ - "users/list-followers-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** Lists the people who the authenticated user follows. */ - "users/list-followed-by-authenticated": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - "users/check-person-is-followed-by-authenticated": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** if the person is followed by the authenticated user */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - /** if the person is not followed by the authenticated user */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - }; - /** - * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." - * - * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. 
- */ - "users/follow": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. */ - "users/unfollow": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - "users/list-gpg-keys-for-authenticated": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["gpg-key"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - "users/create-gpg-key-for-authenticated": { - parameters: {}; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["gpg-key"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** A GPG key in ASCII-armored format. */ - armored_public_key: string; - }; - }; - }; - }; - /** View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - "users/get-gpg-key-for-authenticated": { - parameters: { - path: { - /** gpg_key_id parameter */ - gpg_key_id: components["parameters"]["gpg_key_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["gpg-key"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
*/ - "users/delete-gpg-key-for-authenticated": { - parameters: { - path: { - /** gpg_key_id parameter */ - gpg_key_id: components["parameters"]["gpg_key_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. - * - * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - * - * You can find the permissions for the installation under the `permissions` key. - */ - "apps/list-installations-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** You can find the permissions for the installation under the `permissions` key. */ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - installations: components["schemas"]["installation"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 415: components["responses"]["preview_header_missing"]; - }; - }; - /** - * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - * - * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. - * - * The access the user has to each repository is included in the hash under the `permissions` key. - */ - "apps/list-installation-repos-for-authenticated-user": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** The access the user has to each repository is included in the hash under the `permissions` key. 
*/ - 200: { - headers: {}; - content: { - "application/json": { - total_count: number; - repository_selection?: string; - repositories: components["schemas"]["repository"][]; - }; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Add a single repository to an installation. The authenticated user must have admin access to the repository. - * - * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. - */ - "apps/add-repo-to-installation": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - repository_id: components["parameters"]["repository_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Remove a single repository from an installation. The authenticated user must have admin access to the repository. - * - * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. - */ - "apps/remove-repo-from-installation": { - parameters: { - path: { - /** installation_id parameter */ - installation_id: components["parameters"]["installation_id"]; - repository_id: components["parameters"]["repository_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Shows which type of GitHub user can interact with your public repositories and when the restriction expires. */ - "interactions/get-restrictions-for-authenticated-user": { - responses: { - /** Default response */ - 200: { - content: { - "application/json": Partial & Partial<{ - [key: string]: any; - }>; - }; - }; - /** Response when there are no restrictions */ - 204: never; - }; - }; - /** Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. */ - "interactions/set-restrictions-for-authenticated-user": { - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["interaction-limit-response"]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": components["schemas"]["interaction-limit"]; - }; - }; - }; - /** Removes any interaction restrictions from your public repositories. */ - "interactions/remove-restrictions-for-authenticated-user": { - responses: { - /** Response */ - 204: never; - }; - }; - /** - * List issues across owned and member repositories assigned to the authenticated user. - * - * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. 
For this - * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by - * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull - * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. - */ - "issues/list-for-authenticated-user": { - parameters: { - query: { - /** - * Indicates which sorts of issues to return. Can be one of: - * \* `assigned`: Issues assigned to you - * \* `created`: Issues created by you - * \* `mentioned`: Issues mentioning you - * \* `subscribed`: Issues you're subscribed to updates for - * \* `all`: All issues the authenticated user can see, regardless of participation or creation - */ - filter?: "assigned" | "created" | "mentioned" | "subscribed" | "repos" | "all"; - /** Indicates the state of the issues to return. Can be either `open`, `closed`, or `all`. */ - state?: "open" | "closed" | "all"; - /** A list of comma separated label names. Example: `bug,ui,@high` */ - labels?: components["parameters"]["labels"]; - /** What to sort results by. Can be either `created`, `updated`, `comments`. */ - sort?: "created" | "updated" | "comments"; - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["issue"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - "users/list-public-ssh-keys-for-authenticated": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["key"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
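Because the `issues/list-for-authenticated-user` response above mixes issues and pull requests, callers usually filter on the `pull_request` key. A sketch with the same token assumptions:

```ts
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

async function listAssignedIssues(): Promise<void> {
  const { data: items } = await octokit.request("GET /user/issues", {
    filter: "assigned",
    state: "open",
    per_page: 100,
  });

  // Entries that carry a `pull_request` key are pull requests, not plain issues.
  const issuesOnly = items.filter((item) => !item.pull_request);
  console.log(`${issuesOnly.length} open issues assigned to me`);
}

listAssignedIssues().catch((error) => console.error(error));
```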
*/ - "users/create-public-ssh-key-for-authenticated": { - parameters: {}; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["key"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** A descriptive name for the new key. */ - title?: string; - /** The public SSH key to add to your GitHub account. */ - key: string; - }; - }; - }; - }; - /** View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - "users/get-public-ssh-key-for-authenticated": { - parameters: { - path: { - /** key_id parameter */ - key_id: components["parameters"]["key_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["key"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ - "users/delete-public-ssh-key-for-authenticated": { - parameters: { - path: { - /** key_id parameter */ - key_id: components["parameters"]["key_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ - "apps/list-subscriptions-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["user-marketplace-purchase"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . 
OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ - "apps/list-subscriptions-for-authenticated-user-stubbed": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["user-marketplace-purchase"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - }; - }; - "orgs/list-memberships-for-authenticated-user": { - parameters: { - query: { - /** Indicates the state of the memberships to return. Can be either `active` or `pending`. If not specified, the API returns both active and pending memberships. */ - state?: "active" | "pending"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["org-membership"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - }; - "orgs/get-membership-for-authenticated-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["org-membership"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "orgs/update-membership-for-authenticated-user": { - parameters: { - path: { - org: components["parameters"]["org"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["org-membership"]; - }; - }; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The state that the membership should be in. Only `"active"` will be accepted. */ - state: "active"; - }; - }; - }; - }; - /** Lists all migrations a user has started. */ - "migrations/list-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["migration"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** Initiates the generation of a user migration archive. 
*/ - "migrations/start-for-authenticated-user": { - parameters: {}; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["migration"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** Lock the repositories being migrated at the start of the migration */ - lock_repositories?: boolean; - /** Do not include attachments in the migration */ - exclude_attachments?: boolean; - /** Exclude attributes from the API response to improve performance */ - exclude?: "repositories"[]; - repositories: string[]; - }; - }; - }; - }; - /** - * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: - * - * * `pending` - the migration hasn't started yet. - * * `exporting` - the migration is in progress. - * * `exported` - the migration finished successfully. - * * `failed` - the migration failed. - * - * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). - */ - "migrations/get-status-for-authenticated-user": { - parameters: { - path: { - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - query: { - exclude?: string[]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["migration"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: - * - * * attachments - * * bases - * * commit\_comments - * * issue\_comments - * * issue\_events - * * issues - * * milestones - * * organizations - * * projects - * * protected\_branches - * * pull\_request\_reviews - * * pull\_requests - * * releases - * * repositories - * * review\_comments - * * schema - * * users - * - * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. - */ - "migrations/get-archive-for-authenticated-user": { - parameters: { - path: { - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - }; - responses: { - /** Response */ - 302: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. 
*/ - "migrations/delete-archive-for-authenticated-user": { - parameters: { - path: { - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. */ - "migrations/unlock-repo-for-authenticated-user": { - parameters: { - path: { - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - /** repo_name parameter */ - repo_name: components["parameters"]["repo_name"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists all the repositories for this user migration. */ - "migrations/list-repos-for-user": { - parameters: { - path: { - /** migration_id parameter */ - migration_id: components["parameters"]["migration_id"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - 404: components["responses"]["not_found"]; - }; - }; - /** - * List organizations for the authenticated user. - * - * **OAuth scope requirements** - * - * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. - */ - "orgs/list-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["organization-simple"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** - * Gets a specific package for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-package-for-authenticated-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. 
For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package"]; - }; - }; - }; - }; - /** - * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/delete-package-for-authenticated-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Restores a package owned by the authenticated user. - * - * You can restore a deleted package under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scope. If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/restore-package-for-authenticated-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - }; - query: { - /** package token */ - token?: string; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Returns all package versions for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-all-package-versions-for-package-owned-by-authenticated-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. 
For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - }; - query: { - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** The state of the package, either active or deleted. */ - state?: "active" | "deleted"; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package-version"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Gets a specific package version for a package owned by the authenticated user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-package-version-for-authenticated-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - /** Unique identifier of the package version. */ - package_version_id: components["parameters"]["package_version_id"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package-version"]; - }; - }; - }; - }; - /** - * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. - * - * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/delete-package-version-for-authenticated-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - /** Unique identifier of the package version. */ - package_version_id: components["parameters"]["package_version_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Restores a package version owned by the authenticated user. - * - * You can restore a deleted package version under the following conditions: - * - The package was deleted within the last 30 days. - * - The same package namespace and version is still available and not reused for a new package. 
If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scope. If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/restore-package-version-for-authenticated-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - /** Unique identifier of the package version. */ - package_version_id: components["parameters"]["package_version_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "projects/create-for-authenticated-user": { - parameters: {}; - responses: { - /** Response */ - 201: { - content: { - "application/json": components["schemas"]["project"]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed_simple"]; - }; - requestBody: { - content: { - "application/json": { - /** Name of the project */ - name: string; - /** Body of the project */ - body?: string | null; - }; - }; - }; - }; - /** Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. */ - "users/list-public-emails-for-authenticated": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["email"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. - * - * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. - */ - "repos/list-for-authenticated-user": { - parameters: { - query: { - /** Can be one of `all`, `public`, or `private`. Note: For GitHub AE, can be one of `all`, `internal`, or `private`. */ - visibility?: "all" | "public" | "private"; - /** - * Comma-separated list of values. Can include: - * \* `owner`: Repositories that are owned by the authenticated user. 
- * \* `collaborator`: Repositories that the user has been added to as a collaborator. - * \* `organization_member`: Repositories that the user has access to through being a member of an organization. This includes every repository on every team that the user is on. - */ - affiliation?: string; - /** - * Can be one of `all`, `owner`, `public`, `private`, `member`. Note: For GitHub AE, can be one of `all`, `owner`, `internal`, `private`, `member`. Default: `all` - * - * Will cause a `422` error if used in the same request as **visibility** or **affiliation**. Will cause a `422` error if used in the same request as **visibility** or **affiliation**. - */ - type?: "all" | "owner" | "public" | "private" | "member"; - /** Can be one of `created`, `updated`, `pushed`, `full_name`. */ - sort?: "created" | "updated" | "pushed" | "full_name"; - /** Can be one of `asc` or `desc`. Default: `asc` when using `full_name`, otherwise `desc` */ - direction?: "asc" | "desc"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - before?: components["parameters"]["before"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["repository"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Creates a new repository for the authenticated user. - * - * **OAuth scope requirements** - * - * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: - * - * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. - * * `repo` scope to create a private repository. - */ - "repos/create-for-authenticated-user": { - parameters: {}; - responses: { - /** Response */ - 201: { - headers: { - Location?: string; - }; - content: { - "application/json": components["schemas"]["repository"]; - }; - }; - 304: components["responses"]["not_modified"]; - 400: components["responses"]["bad_request"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - requestBody: { - content: { - "application/json": { - /** The name of the repository. */ - name: string; - /** A short description of the repository. */ - description?: string; - /** A URL with more information about the repository. */ - homepage?: string; - /** Whether the repository is private. */ - private?: boolean; - /** Whether issues are enabled. */ - has_issues?: boolean; - /** Whether projects are enabled. */ - has_projects?: boolean; - /** Whether the wiki is enabled. */ - has_wiki?: boolean; - /** The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. 
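For the `repos/list-for-authenticated-user` parameters above, `type` cannot be combined with `visibility` or `affiliation` without a 422, so a sketch that sticks to the latter pair (same `@actions/github` / `GITHUB_TOKEN` assumptions):

```ts
import { getOctokit } from "@actions/github";

const octokit = getOctokit(process.env.GITHUB_TOKEN ?? "");

async function listOwnPrivateRepos(): Promise<void> {
  // Using `visibility` + `affiliation`; adding `type` here would trigger the 422 described above.
  const { data: repos } = await octokit.request("GET /user/repos", {
    visibility: "private",
    affiliation: "owner",
    per_page: 100,
  });
  for (const repo of repos) {
    console.log(repo.full_name);
  }
}

listOwnPrivateRepos().catch((error) => console.error(error));
```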
*/ - team_id?: number; - /** Whether the repository is initialized with a minimal README. */ - auto_init?: boolean; - /** The desired language or platform to apply to the .gitignore. */ - gitignore_template?: string; - /** The license keyword of the open source license for this repository. */ - license_template?: string; - /** Whether to allow squash merges for pull requests. */ - allow_squash_merge?: boolean; - /** Whether to allow merge commits for pull requests. */ - allow_merge_commit?: boolean; - /** Whether to allow rebase merges for pull requests. */ - allow_rebase_merge?: boolean; - /** Whether to delete head branches when pull requests are merged */ - delete_branch_on_merge?: boolean; - /** Whether downloads are enabled. */ - has_downloads?: boolean; - /** Whether this repository acts as a template that can be used to generate new repositories. */ - is_template?: boolean; - }; - }; - }; - }; - /** When authenticating as a user, this endpoint will list all currently open repository invitations for that user. */ - "repos/list-invitations-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["repository-invitation"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "repos/decline-invitation": { - parameters: { - path: { - /** invitation_id parameter */ - invitation_id: components["parameters"]["invitation_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 409: components["responses"]["conflict"]; - }; - }; - "repos/accept-invitation": { - parameters: { - path: { - /** invitation_id parameter */ - invitation_id: components["parameters"]["invitation_id"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - 409: components["responses"]["conflict"]; - }; - }; - /** - * Lists repositories the authenticated user has starred. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - "activity/list-repos-starred-by-authenticated-user": { - parameters: { - query: { - /** One of `created` (when the repository was starred) or `updated` (when it was last pushed to). */ - sort?: components["parameters"]["sort"]; - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["repository"][]; - "application/vnd.github.v3.star+json": components["schemas"]["starred-repository"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - "activity/check-repo-is-starred-by-authenticated-user": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response if this repository is starred by you */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - /** Not Found if this repository is not starred by you */ - 404: { - content: { - "application/json": components["schemas"]["basic-error"]; - }; - }; - }; - }; - /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ - "activity/star-repo-for-authenticated-user": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - "activity/unstar-repo-for-authenticated-user": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - }; - }; - responses: { - /** Response */ - 204: never; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** Lists repositories the authenticated user is watching. */ - "activity/list-watched-repos-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - }; - }; - /** List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). */ - "teams/list-for-authenticated-user": { - parameters: { - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["team-full"][]; - }; - }; - 304: components["responses"]["not_modified"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. - * - * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. - */ - "users/list": { - parameters: { - query: { - /** A user ID. Only return users with an ID greater than this ID. */ - since?: components["parameters"]["since-user"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: { - Link?: string; - }; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - 304: components["responses"]["not_modified"]; - }; - }; - /** - * Provides publicly available information about someone with a GitHub account. - * - * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below" - * - * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). - * - * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". - */ - "users/get-by-username": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["private-user"] | components["schemas"]["public-user"]; - }; - }; - 202: components["responses"]["accepted"]; - 404: components["responses"]["not_found"]; - }; - }; - /** If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. */ - "activity/list-events-for-authenticated-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - }; - }; - /** This is the user's organization dashboard. 
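The `users/list` description above notes that pagination is driven solely by the `since` parameter together with the `Link` header. A simplified sketch of walking that cursor by tracking the highest user ID seen so far; the page limit and starting ID are placeholders.

```ts
import { Octokit } from "@octokit/core";

async function listUsers(token: string): Promise<void> {
  const octokit = new Octokit({ auth: token });
  let since = 0; // only users with an ID greater than this are returned

  for (let page = 0; page < 3; page++) {
    const { data: users } = await octokit.request("GET /users", { since, per_page: 100 });
    if (users.length === 0) break;

    // The Link header carries the same cursor as a ready-made URL;
    // tracking the last ID manually works just as well for a sketch.
    since = users[users.length - 1].id;
    console.log(`fetched ${users.length} users, next since=${since}`);
  }
}
```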
You must be authenticated as the user to view this. */ - "activity/list-org-events-for-authenticated-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - org: components["parameters"]["org"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - }; - }; - "activity/list-public-events-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - }; - }; - /** Lists the people following the specified user. */ - "users/list-followers-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - }; - }; - /** Lists the people who the specified user follows. */ - "users/list-following-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["simple-user"][]; - }; - }; - }; - }; - "users/check-following-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - target_user: string; - }; - }; - responses: { - /** if the user follows the target user */ - 204: never; - /** if the user does not follow the target user */ - 404: unknown; - }; - }; - /** Lists public gists for the specified user: */ - "gists/list-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ - since?: components["parameters"]["since"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["base-gist"][]; - }; - }; - 422: components["responses"]["validation_failed"]; - }; - }; - /** Lists the GPG keys for a user. This information is accessible by anyone. */ - "users/list-gpg-keys-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
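The `users/check-following-for-user` operation above reports its result purely through the status code, 204 when the user follows the target and 404 when they do not. A sketch of turning that into a boolean; the error-shape check is an assumption about how Octokit surfaces HTTP errors.

```ts
import { Octokit } from "@octokit/core";

async function follows(octokit: Octokit, username: string, target: string): Promise<boolean> {
  try {
    // 204: `username` follows `target`; no body is returned.
    await octokit.request("GET /users/{username}/following/{target_user}", {
      username,
      target_user: target,
    });
    return true;
  } catch (error) {
    // Octokit throws on non-2xx responses; 404 here simply means "does not follow".
    if ((error as { status?: number }).status === 404) return false;
    throw error;
  }
}
```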
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["gpg-key"][]; - }; - }; - }; - }; - /** - * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. - * - * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: - * - * ```shell - * curl -u username:token - * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 - * ``` - */ - "users/get-context-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Identifies which additional information you'd like to receive about the person's hovercard. Can be `organization`, `repository`, `issue`, `pull_request`. **Required** when using `subject_id`. */ - subject_type?: "organization" | "repository" | "issue" | "pull_request"; - /** Uses the ID for the `subject_type` you specified. **Required** when using `subject_type`. */ - subject_id?: string; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["hovercard"]; - }; - }; - 404: components["responses"]["not_found"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** - * Enables an authenticated GitHub App to find the user’s installation information. - * - * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. - */ - "apps/get-user-installation": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["installation"]; - }; - }; - }; - }; - /** Lists the _verified_ public SSH keys for a user. This is accessible by anyone. */ - "users/list-public-keys-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["key-simple"][]; - }; - }; - }; - }; - /** - * List [public organization memberships](https://help.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. - * - * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. - */ - "orgs/list-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. 
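The hovercard description above already includes a cURL example; the same call expressed through `octokit.request` might look like the sketch below, reusing the `octocat` and `Spoon-Knife` subject values from that example.

```ts
import { Octokit } from "@octokit/core";

async function getHovercard(token: string) {
  const octokit = new Octokit({ auth: token });

  // subject_type/subject_id give extra context about `octocat`
  // in relation to the Spoon-Knife repository (ID 1300192).
  const { data } = await octokit.request("GET /users/{username}/hovercard", {
    username: "octocat",
    subject_type: "repository",
    subject_id: "1300192",
  });

  return data; // shaped like components["schemas"]["hovercard"]
}
```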
*/ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["organization-simple"][]; - }; - }; - }; - }; - /** - * Gets a specific package metadata for a public package owned by a user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-package-for-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package"]; - }; - }; - }; - }; - /** - * Returns all package versions for a public package owned by a specified user. - * - * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-all-package-versions-for-package-owned-by-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package-version"][]; - }; - }; - 401: components["responses"]["requires_authentication"]; - 403: components["responses"]["forbidden"]; - 404: components["responses"]["not_found"]; - }; - }; - /** - * Gets a specific package version for a public package owned by a specified user. - * - * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. - * If `package_type` is not `container`, your token must also include the `repo` scope. - */ - "packages/get-package-version-for-user": { - parameters: { - path: { - /** The type of supported package. Can be one of `npm`, `maven`, `rubygems`, `nuget`, `docker`, or `container`. For Docker images that use the package namespace `https://ghcr.io/owner/package-name`, use `container`. */ - package_type: components["parameters"]["package_type"]; - /** The name of the package. */ - package_name: components["parameters"]["package_name"]; - /** Unique identifier of the package version. */ - package_version_id: components["parameters"]["package_version_id"]; - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["package-version"]; - }; - }; - }; - }; - "projects/list-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Indicates the state of the projects to return. Can be either `open`, `closed`, or `all`. 
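The `packages/*` operations above share the `package_type`, `package_name`, and `username` path parameters and require a token with the `packages:read` scope. A hedged sketch of listing the versions of a public container package; the owner and package name are placeholders.

```ts
import { Octokit } from "@octokit/core";

async function listPackageVersions(token: string): Promise<void> {
  const octokit = new Octokit({ auth: token });

  const { data: versions } = await octokit.request(
    "GET /users/{username}/packages/{package_type}/{package_name}/versions",
    { username: "octocat", package_type: "container", package_name: "hello-world" }
  );

  for (const version of versions) {
    // Each entry matches components["schemas"]["package-version"].
    console.log(version.id, version.name);
  }
}
```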
*/ - state?: "open" | "closed" | "all"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["project"][]; - }; - }; - 415: components["responses"]["preview_header_missing"]; - 422: components["responses"]["validation_failed"]; - }; - }; - /** These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. */ - "activity/list-received-events-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - }; - }; - "activity/list-received-public-events-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["event"][]; - }; - }; - }; - }; - /** Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. */ - "repos/list-for-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Can be one of `all`, `owner`, `member`. */ - type?: "all" | "owner" | "member"; - /** Can be one of `created`, `updated`, `pushed`, `full_name`. */ - sort?: "created" | "updated" | "pushed" | "full_name"; - /** Can be one of `asc` or `desc`. Default: `asc` when using `full_name`, otherwise `desc` */ - direction?: "asc" | "desc"; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - }; - }; - /** - * Gets the summary of the free and paid GitHub Actions minutes used. - * - * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". - * - * Access tokens must have the `user` scope. 
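For the Actions billing operation whose description begins above, a minimal sketch of reading the usage summary for a user; the token must carry the `user` scope, and the field names follow the `actions-billing-usage` schema.

```ts
import { Octokit } from "@octokit/core";

async function printActionsBilling(token: string, username: string): Promise<void> {
  const octokit = new Octokit({ auth: token });

  const { data } = await octokit.request(
    "GET /users/{username}/settings/billing/actions",
    { username }
  );

  // Minutes are rounded up to the nearest whole minute, as noted above.
  console.log(`total minutes used: ${data.total_minutes_used}`);
  console.log(`paid minutes used:  ${data.total_paid_minutes_used}`);
  console.log(`included minutes:   ${data.included_minutes}`);
}
```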
- */ - "billing/get-github-actions-billing-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["actions-billing-usage"]; - }; - }; - }; - }; - /** - * Gets the free and paid storage used for GitHub Packages in gigabytes. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `user` scope. - */ - "billing/get-github-packages-billing-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["packages-billing-usage"]; - }; - }; - }; - }; - /** - * Gets the estimated paid and estimated total storage used for GitHub Actions and Github Packages. - * - * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://help.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." - * - * Access tokens must have the `user` scope. - */ - "billing/get-shared-storage-billing-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["combined-billing-usage"]; - }; - }; - }; - }; - /** - * Lists repositories a user has starred. - * - * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: - */ - "activity/list-repos-starred-by-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** One of `created` (when the repository was starred) or `updated` (when it was last pushed to). */ - sort?: components["parameters"]["sort"]; - /** One of `asc` (ascending) or `desc` (descending). */ - direction?: components["parameters"]["direction"]; - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": Partial & Partial; - }; - }; - }; - }; - /** Lists repositories a user is watching. */ - "activity/list-repos-watched-by-user": { - parameters: { - path: { - username: components["parameters"]["username"]; - }; - query: { - /** Results per page (max 100). */ - per_page?: components["parameters"]["per_page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - headers: {}; - content: { - "application/json": components["schemas"]["minimal-repository"][]; - }; - }; - }; - }; - /** Get a random sentence from the Zen of GitHub */ - "meta/get-zen": { - responses: { - /** Response */ - 200: { - content: { - "text/plain": string; - }; - }; - }; - }; - /** - * Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. 
- * - * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. - * - * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. - * - * **Working with large comparisons** - * - * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." - * - * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. - * - * **Signature verification object** - * - * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: - * - * | Name | Type | Description | - * | ---- | ---- | ----------- | - * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | - * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | - * | `signature` | `string` | The signature that was extracted from the commit. | - * | `payload` | `string` | The value that was signed. | - * - * These are the possible values for `reason` in the `verification` object: - * - * | Value | Description | - * | ----- | ----------- | - * | `expired_key` | The key that made the signature is expired. | - * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | - * | `gpgverify_error` | There was an error communicating with the signature verification service. | - * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | - * | `unsigned` | The object does not include a signature. | - * | `unknown_signature_type` | A non-PGP signature was found in the commit. | - * | `no_user` | No user was associated with the `committer` email address in the commit. | - * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | - * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | - * | `unknown_key` | The key that made the signature has not been registered with any user's account. | - * | `malformed_signature` | There was an error parsing the signature. | - * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. 
| - * | `valid` | None of the above errors applied, so the signature is considered to be verified. | - */ - "repos/compare-commits": { - parameters: { - path: { - owner: components["parameters"]["owner"]; - repo: components["parameters"]["repo"]; - base: string; - head: string; - }; - query: { - /** Results per page (max 100) */ - per_page?: components["parameters"]["per-page"]; - /** Page number of the results to fetch. */ - page?: components["parameters"]["page"]; - }; - }; - responses: { - /** Response */ - 200: { - content: { - "application/json": components["schemas"]["commit-comparison"]; - }; - }; - 404: components["responses"]["not_found"]; - 500: components["responses"]["internal_error"]; - }; - }; -} diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/index.d.ts b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/index.d.ts deleted file mode 100644 index 5d080663..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./generated/types"; -export * from "./version"; diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/version.d.ts b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/version.d.ts deleted file mode 100644 index 02faa5f2..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-types/version.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const VERSION = "6.2.1"; diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js deleted file mode 100644 index 25ceac73..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js +++ /dev/null @@ -1,4 +0,0 @@ -const VERSION = "6.2.1"; - -export { VERSION }; -//# sourceMappingURL=index.js.map diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js.map b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js.map deleted file mode 100644 index 871b9d54..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/dist-web/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sources":["../dist-src/version.js"],"sourcesContent":["export const VERSION = \"6.2.1\";\n"],"names":[],"mappings":"AAAY,MAAC,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/package.json b/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/package.json deleted file mode 100644 index 0e524a34..00000000 --- a/node_modules/@technote-space/github-action-pr-helper/node_modules/@octokit/openapi-types/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "@octokit/openapi-types", - "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec", - "version": "6.2.1", - "license": "MIT", - "files": [ - "dist-*/", - "bin/" - ], - "pika": true, - "sideEffects": false, - "keywords": [], - "repository": 
"github:octokit/openapi-types.ts", - "dependencies": {}, - "devDependencies": { - "@octokit/tsconfig": "^1.0.2", - "@pika/pack": "^0.5.0", - "@pika/plugin-build-node": "^0.9.2", - "@pika/plugin-build-web": "^0.9.2", - "@pika/plugin-ts-standard-pkg": "^0.9.2", - "openapi-typescript": "^3.0.3", - "pika-plugin-merge-properties": "^1.0.6", - "semantic-release-plugin-update-version-in-files": "^1.1.0", - "typescript": "^4.1.5" - }, - "publishConfig": { - "access": "public" - }, - "source": "dist-src/index.js", - "types": "dist-types/index.d.ts", - "octokit": { - "openapi-version": "2.17.1" - }, - "main": "dist-node/index.js", - "module": "dist-web/index.js" -} diff --git a/node_modules/@technote-space/github-action-pr-helper/package.json b/node_modules/@technote-space/github-action-pr-helper/package.json index 32db6f0e..7ad78fec 100644 --- a/node_modules/@technote-space/github-action-pr-helper/package.json +++ b/node_modules/@technote-space/github-action-pr-helper/package.json @@ -1,6 +1,6 @@ { "name": "@technote-space/github-action-pr-helper", - "version": "2.2.6", + "version": "2.2.7", "description": "PullRequest Helper for GitHub Actions.", "keywords": [ "github", @@ -38,28 +38,28 @@ }, "dependencies": { "@actions/core": "^1.2.7", - "@actions/github": "^4.0.0", - "@octokit/openapi-types": "^6.2.1", - "@technote-space/filter-github-action": "^0.5.24", - "@technote-space/github-action-helper": "^5.2.6", + "@actions/github": "^5.0.0", + "@octokit/openapi-types": "^7.0.0", + "@technote-space/filter-github-action": "^0.5.27", + "@technote-space/github-action-helper": "^5.2.9", "moment": "^2.29.1" }, "devDependencies": { - "@commitlint/cli": "^12.1.1", - "@commitlint/config-conventional": "^12.1.1", - "@technote-space/github-action-test-helper": "^0.7.8", + "@commitlint/cli": "^12.1.4", + "@commitlint/config-conventional": "^12.1.4", + "@technote-space/github-action-test-helper": "^0.7.12", "@types/jest": "^26.0.23", - "@types/node": "^15.0.1", - "@typescript-eslint/eslint-plugin": "^4.22.0", - "@typescript-eslint/parser": "^4.22.0", - "eslint": "^7.25.0", + "@types/node": "^15.3.0", + "@typescript-eslint/eslint-plugin": "^4.23.0", + "@typescript-eslint/parser": "^4.23.0", + "eslint": "^7.26.0", "husky": "^6.0.0", "jest": "^26.6.3", "jest-circus": "^26.6.3", - "lint-staged": "^10.5.4", + "lint-staged": "^11.0.0", "nock": "^13.0.11", "pinst": "^2.1.6", - "ts-jest": "^26.5.5", + "ts-jest": "^26.5.6", "typescript": "^4.2.4" }, "publishConfig": { diff --git a/node_modules/@textlint/ast-node-types/CHANGELOG.md b/node_modules/@textlint/ast-node-types/CHANGELOG.md deleted file mode 100644 index 6e336130..00000000 --- a/node_modules/@textlint/ast-node-types/CHANGELOG.md +++ /dev/null @@ -1,378 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. -See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
- - -## [4.4.2](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.4.1...@textlint/ast-node-types@4.4.2) (2021-03-19) - - -### Chores - -* **deps:** update dependency mocha to ^8.3.0 ([0464adb](https://github.com/textlint/textlint/commit/0464adb)) -* **deps:** update dependency mocha to ^8.3.1 ([cc509ed](https://github.com/textlint/textlint/commit/cc509ed)) -* **deps:** update patch updates ([183eb8d](https://github.com/textlint/textlint/commit/183eb8d)) - - - - - - -## [4.4.1](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.3.4...@textlint/ast-node-types@4.4.1) (2021-01-22) - - -### Bug Fixes - -* **ast-node-types:** deprecated "ReferenceDef" type ([#701](https://github.com/textlint/textlint/issues/701)) ([a088520](https://github.com/textlint/textlint/commit/a088520)) - - -### Chores - -* **deps:** update minor updates ([7ef0be6](https://github.com/textlint/textlint/commit/7ef0be6)) -* **deps:** update minor updates ([2f3dcb6](https://github.com/textlint/textlint/commit/2f3dcb6)) -* **deps:** update patch updates ([3d9660b](https://github.com/textlint/textlint/commit/3d9660b)) -* **deps:** update patch updates ([fe2ad4f](https://github.com/textlint/textlint/commit/fe2ad4f)) -* **deps:** update patch updates ([e438ff2](https://github.com/textlint/textlint/commit/e438ff2)) -* **deps:** update patch updates ([4f4c206](https://github.com/textlint/textlint/commit/4f4c206)) -* **deps:** update TypeScript deps ([#705](https://github.com/textlint/textlint/issues/705)) ([1baa72a](https://github.com/textlint/textlint/commit/1baa72a)) -* use [@monorepo-utils](https://github.com/monorepo-utils)/workspaces-to-typescript-project-references ([#699](https://github.com/textlint/textlint/issues/699)) ([eff1943](https://github.com/textlint/textlint/commit/eff1943)) - - -### Features - -* **source-code-fixer:** add `[@textlint](https://github.com/textlint)/source-code-fixer` ([#736](https://github.com/textlint/textlint/issues/736)) ([bf7235a](https://github.com/textlint/textlint/commit/bf7235a)) - - - - - - -# [4.4.0](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.3.4...@textlint/ast-node-types@4.4.0) (2021-01-22) - - -### Bug Fixes - -* **ast-node-types:** deprecated "ReferenceDef" type ([#701](https://github.com/textlint/textlint/issues/701)) ([a088520](https://github.com/textlint/textlint/commit/a088520)) - - -### Chores - -* **deps:** update minor updates ([7ef0be6](https://github.com/textlint/textlint/commit/7ef0be6)) -* **deps:** update minor updates ([2f3dcb6](https://github.com/textlint/textlint/commit/2f3dcb6)) -* **deps:** update patch updates ([3d9660b](https://github.com/textlint/textlint/commit/3d9660b)) -* **deps:** update patch updates ([fe2ad4f](https://github.com/textlint/textlint/commit/fe2ad4f)) -* **deps:** update patch updates ([e438ff2](https://github.com/textlint/textlint/commit/e438ff2)) -* **deps:** update patch updates ([4f4c206](https://github.com/textlint/textlint/commit/4f4c206)) -* **deps:** update TypeScript deps ([#705](https://github.com/textlint/textlint/issues/705)) ([1baa72a](https://github.com/textlint/textlint/commit/1baa72a)) -* use [@monorepo-utils](https://github.com/monorepo-utils)/workspaces-to-typescript-project-references ([#699](https://github.com/textlint/textlint/issues/699)) ([eff1943](https://github.com/textlint/textlint/commit/eff1943)) - - -### Features - -* **source-code-fixer:** add `[@textlint](https://github.com/textlint)/source-code-fixer` 
([#736](https://github.com/textlint/textlint/issues/736)) ([bf7235a](https://github.com/textlint/textlint/commit/bf7235a)) - - - - - - -## [4.3.5](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.3.4...@textlint/ast-node-types@4.3.5) (2020-12-22) - - -### Bug Fixes - -* **ast-node-types:** deprecated "ReferenceDef" type ([#701](https://github.com/textlint/textlint/issues/701)) ([a088520](https://github.com/textlint/textlint/commit/a088520)) - - -### Chores - -* **deps:** update minor updates ([7ef0be6](https://github.com/textlint/textlint/commit/7ef0be6)) -* **deps:** update minor updates ([2f3dcb6](https://github.com/textlint/textlint/commit/2f3dcb6)) -* **deps:** update patch updates ([3d9660b](https://github.com/textlint/textlint/commit/3d9660b)) -* **deps:** update patch updates ([fe2ad4f](https://github.com/textlint/textlint/commit/fe2ad4f)) -* **deps:** update patch updates ([e438ff2](https://github.com/textlint/textlint/commit/e438ff2)) -* **deps:** update patch updates ([4f4c206](https://github.com/textlint/textlint/commit/4f4c206)) -* **deps:** update TypeScript deps ([#705](https://github.com/textlint/textlint/issues/705)) ([1baa72a](https://github.com/textlint/textlint/commit/1baa72a)) -* use [@monorepo-utils](https://github.com/monorepo-utils)/workspaces-to-typescript-project-references ([#699](https://github.com/textlint/textlint/issues/699)) ([eff1943](https://github.com/textlint/textlint/commit/eff1943)) - - - - - - -## [4.3.4](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.3.3...@textlint/ast-node-types@4.3.4) (2020-07-24) - - -### Bug Fixes - -* improve "module" supports ([5ba5182](https://github.com/textlint/textlint/commit/5ba5182)) - - - - - - -## [4.3.3](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.3.2...@textlint/ast-node-types@4.3.3) (2020-07-24) - - -### Bug Fixes - -* "clean" command should remove tsconfig.module.tsbuildinfo ([76ac72a](https://github.com/textlint/textlint/commit/76ac72a)) - - - - - - -## [4.3.2](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.3.1...@textlint/ast-node-types@4.3.2) (2020-07-24) - - -### Bug Fixes - -* **@textlint/kernel:** fix export only type definition ([1234930](https://github.com/textlint/textlint/commit/1234930)) - - - - - - -## [4.3.1](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.3.0...@textlint/ast-node-types@4.3.1) (2020-07-24) - - -### Bug Fixes - -* include module ([2de05f7](https://github.com/textlint/textlint/commit/2de05f7)) - - - - - - -# [4.3.0](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.2.5...@textlint/ast-node-types@4.3.0) (2020-07-24) - - -### Chores - -* **deps:** update devDepencies ([#667](https://github.com/textlint/textlint/issues/667)) ([0503af6](https://github.com/textlint/textlint/commit/0503af6)) - - -### Code Refactoring - -* **typescript:** Use TypeScript Project References ([#668](https://github.com/textlint/textlint/issues/668)) ([bbffd43](https://github.com/textlint/textlint/commit/bbffd43)) - - -### Tests - -* migrate mocha.opts to .mocharc.json ([#682](https://github.com/textlint/textlint/issues/682)) ([332ae5e](https://github.com/textlint/textlint/commit/332ae5e)) - - - - - - -## [4.2.5](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.2.4...@textlint/ast-node-types@4.2.5) (2019-10-14) - - -### Tests - -* **ast-node-types:** fix type error ([9a678c5](https://github.com/textlint/textlint/commit/9a678c5)) - - - - - - -## 
[4.2.4](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.2.2...@textlint/ast-node-types@4.2.4) (2019-07-20) - - -### Code Refactoring - -* **utils:** move implementation from types to utils ([#611](https://github.com/textlint/textlint/issues/611)) ([cd9adbe](https://github.com/textlint/textlint/commit/cd9adbe)) - - - - - - -## [4.2.3](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.2.2...@textlint/ast-node-types@4.2.3) (2019-07-13) - - -### Code Refactoring - -* **utils:** move implementation from types to utils ([#611](https://github.com/textlint/textlint/issues/611)) ([cd9adbe](https://github.com/textlint/textlint/commit/cd9adbe)) - - - - - - -## [4.2.2](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.2.1...@textlint/ast-node-types@4.2.2) (2019-04-30) - - -### Chores - -* **deps:** update deps && devDeps ([a19463b](https://github.com/textlint/textlint/commit/a19463b)) - - -### Styles - -* apply prettier ([925a5a5](https://github.com/textlint/textlint/commit/925a5a5)) - - - - - - -## [4.2.1](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.2.0...@textlint/ast-node-types@4.2.1) (2019-01-03) - -**Note:** Version bump only for package @textlint/ast-node-types - - - - - - -# [4.2.0](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.1.0...@textlint/ast-node-types@4.2.0) (2019-01-03) - - -### Documentation - -* **rule:** add Type name and Node Type mapping table ([5ace9d1](https://github.com/textlint/textlint/commit/5ace9d1)) - - -### Features - -* **ast-node-types:** add `*Exit` type as constant value ([7106f5d](https://github.com/textlint/textlint/commit/7106f5d)) -* **ast-node-types:** add `TypeofTxtNode` type function ([69bc1ea](https://github.com/textlint/textlint/commit/69bc1ea)) - - -### Tests - -* **ast-note-types:** fix tests ([e708b7c](https://github.com/textlint/textlint/commit/e708b7c)) - - - - - - -# [4.1.0](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.0.3...@textlint/ast-node-types@4.1.0) (2019-01-01) - - -### Chores - -* **deps:** update TypeScript deps ([3ea7fb0](https://github.com/textlint/textlint/commit/3ea7fb0)) - - -### Code Refactoring - -* **types:** move type definition for rule to [@textlint](https://github.com/textlint)/types ([9be6e16](https://github.com/textlint/textlint/commit/9be6e16)) - - - - - -## [4.0.3](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.0.2...@textlint/ast-node-types@4.0.3) (2018-07-22) - - -### Chores - -* **deps:** update mocha ([5df8af4](https://github.com/textlint/textlint/commit/5df8af4)) - - -### Code Refactoring - -* **kernel:** separate linter and fixer descriptor ([b5bc8bd](https://github.com/textlint/textlint/commit/b5bc8bd)) - - - - - -## [4.0.2](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.0.1...@textlint/ast-node-types@4.0.2) (2018-03-25) - - -### Chores - -* **test:** use `ts-node-test-register` for TypeScript testing ([be746d8](https://github.com/textlint/textlint/commit/be746d8)), closes [#451](https://github.com/textlint/textlint/issues/451) - - - - - -## [4.0.1](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@4.0.0...@textlint/ast-node-types@4.0.1) (2018-01-18) - - - - -**Note:** Version bump only for package @textlint/ast-node-types - - -# [4.0.0](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@3.0.1...@textlint/ast-node-types@4.0.0) (2017-12-31) - - -### Chores - -* **ast-node-types:** Make 
ASTNodeTypes enum ([8ae3e67](https://github.com/textlint/textlint/commit/8ae3e67)) - - -### BREAKING CHANGES - -* **ast-node-types:** TypeScript need to user it as enum - - - - - -## [3.0.1](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@3.0.0...@textlint/ast-node-types@3.0.1) (2017-12-25) - - -### Bug Fixes - -* **monorepo:** fix TypeScript module resolution in monorepo ([d5df499](https://github.com/textlint/textlint/commit/d5df499)) - - - - - -# [3.0.0](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@3.0.0-next.0...@textlint/ast-node-types@3.0.0) (2017-12-18) - - - - -**Note:** Version bump only for package @textlint/ast-node-types - - -# [3.0.0-next.0](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@2.0.0...@textlint/ast-node-types@3.0.0-next.0) (2017-12-15) - - -### Bug Fixes - -* **ast-node-types:** Make TxtNode non-weak type ([#382](https://github.com/textlint/textlint/issues/382)) ([81c0145](https://github.com/textlint/textlint/commit/81c0145)) - - - - - -## [1.1.2](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@1.1.1...@textlint/ast-node-types@1.1.2) (2017-05-05) - - -### Bug Fixes - -* **docs:** README ([dd98f5c](https://github.com/textlint/textlint/commit/dd98f5c)) - - - - - -## [1.1.1](https://github.com/textlint/textlint/compare/@textlint/ast-node-types@1.1.0...@textlint/ast-node-types@1.1.1) (2017-05-05) - - - - - -# 1.1.0 (2017-05-05) - - -### Features - -* **ast-node-types:** add ast-node-types ([2ba6583](https://github.com/textlint/textlint/commit/2ba6583)) -* **packages:** add [@textlint](https://github.com/textlint)/ast-node-types package ([66fb289](https://github.com/textlint/textlint/commit/66fb289)) diff --git a/node_modules/@textlint/ast-node-types/lib/index.js.map b/node_modules/@textlint/ast-node-types/lib/index.js.map deleted file mode 100644 index ec06f7d9..00000000 --- a/node_modules/@textlint/ast-node-types/lib/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,iBAAiB;AACjB,YAAY,CAAC;;;AASb,IAAY,YAgDX;AAhDD,WAAY,YAAY;IACpB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,yCAAyB,CAAA;IACzB,kDAAkC,CAAA;IAClC,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,iDAAiC,CAAA;IACjC,0DAA0C,CAAA;IAC1C,mCAAmB,CAAA;IACnB,4CAA4B,CAAA;IAC5B;;OAEG;IACH,6CAA6B,CAAA;IAC7B;;OAEG;IACH,sDAAsC,CAAA;IACtC,SAAS;IACT,2BAAW,CAAA;IACX,oCAAoB,CAAA;IACpB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;AAC9B,CAAC,EAhDW,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAgDvB"} \ No newline at end of file diff --git a/node_modules/@textlint/ast-node-types/lib/TypeofTxtNode.d.ts b/node_modules/@textlint/ast-node-types/lib/src/TypeofTxtNode.d.ts similarity index 100% rename from node_modules/@textlint/ast-node-types/lib/TypeofTxtNode.d.ts rename to node_modules/@textlint/ast-node-types/lib/src/TypeofTxtNode.d.ts diff --git a/node_modules/@textlint/ast-node-types/lib/TypeofTxtNode.js b/node_modules/@textlint/ast-node-types/lib/src/TypeofTxtNode.js similarity index 100% rename from node_modules/@textlint/ast-node-types/lib/TypeofTxtNode.js rename to 
node_modules/@textlint/ast-node-types/lib/src/TypeofTxtNode.js diff --git a/node_modules/@textlint/ast-node-types/lib/TypeofTxtNode.js.map b/node_modules/@textlint/ast-node-types/lib/src/TypeofTxtNode.js.map similarity index 52% rename from node_modules/@textlint/ast-node-types/lib/TypeofTxtNode.js.map rename to node_modules/@textlint/ast-node-types/lib/src/TypeofTxtNode.js.map index 8c7eff29..84b4403d 100644 --- a/node_modules/@textlint/ast-node-types/lib/TypeofTxtNode.js.map +++ b/node_modules/@textlint/ast-node-types/lib/src/TypeofTxtNode.js.map @@ -1 +1 @@ -{"version":3,"file":"TypeofTxtNode.js","sourceRoot":"","sources":["../src/TypeofTxtNode.ts"],"names":[],"mappings":""} \ No newline at end of file +{"version":3,"file":"TypeofTxtNode.js","sourceRoot":"","sources":["../../src/TypeofTxtNode.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@textlint/ast-node-types/lib/index.d.ts b/node_modules/@textlint/ast-node-types/lib/src/index.d.ts similarity index 100% rename from node_modules/@textlint/ast-node-types/lib/index.d.ts rename to node_modules/@textlint/ast-node-types/lib/src/index.d.ts diff --git a/node_modules/@textlint/ast-node-types/lib/index.js b/node_modules/@textlint/ast-node-types/lib/src/index.js similarity index 100% rename from node_modules/@textlint/ast-node-types/lib/index.js rename to node_modules/@textlint/ast-node-types/lib/src/index.js diff --git a/node_modules/@textlint/ast-node-types/lib/src/index.js.map b/node_modules/@textlint/ast-node-types/lib/src/index.js.map new file mode 100644 index 00000000..c6050240 --- /dev/null +++ b/node_modules/@textlint/ast-node-types/lib/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,iBAAiB;AACjB,YAAY,CAAC;;;AASb,IAAY,YAgDX;AAhDD,WAAY,YAAY;IACpB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,yCAAyB,CAAA;IACzB,kDAAkC,CAAA;IAClC,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,iDAAiC,CAAA;IACjC,0DAA0C,CAAA;IAC1C,mCAAmB,CAAA;IACnB,4CAA4B,CAAA;IAC5B;;OAEG;IACH,6CAA6B,CAAA;IAC7B;;OAEG;IACH,sDAAsC,CAAA;IACtC,SAAS;IACT,2BAAW,CAAA;IACX,oCAAoB,CAAA;IACpB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;AAC9B,CAAC,EAhDW,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAgDvB"} \ No newline at end of file diff --git a/node_modules/@textlint/ast-node-types/module/index.js.map b/node_modules/@textlint/ast-node-types/module/index.js.map deleted file mode 100644 index 400f4346..00000000 --- a/node_modules/@textlint/ast-node-types/module/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,iBAAiB;AACjB,YAAY,CAAC;AASb,MAAM,CAAN,IAAY,YAgDX;AAhDD,WAAY,YAAY;IACpB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,yCAAyB,CAAA;IACzB,kDAAkC,CAAA;IAClC,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,iDAAiC,CAAA;IACjC,0DAA0C,CAAA;IAC1C,mCAAmB,CAAA;IACnB,4CAA4B,CAAA;IAC5B;;OAEG;IACH,6CAA6B,CAAA;IAC7B;;OAEG;IACH,sDAAsC,CAAA;IACtC,SAAS;IACT,2BAAW,CAAA;IACX,oCAAoB,CAAA;IACpB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;AAC9B,CAAC,EAhDW,YAAY,KAAZ,YAAY,QAgDvB"} \ No newline at end of file diff --git a/node_modules/@textlint/ast-node-types/module/TypeofTxtNode.d.ts b/node_modules/@textlint/ast-node-types/module/src/TypeofTxtNode.d.ts similarity index 100% rename from node_modules/@textlint/ast-node-types/module/TypeofTxtNode.d.ts rename to node_modules/@textlint/ast-node-types/module/src/TypeofTxtNode.d.ts diff --git a/node_modules/@textlint/ast-node-types/module/TypeofTxtNode.js b/node_modules/@textlint/ast-node-types/module/src/TypeofTxtNode.js similarity index 100% rename from node_modules/@textlint/ast-node-types/module/TypeofTxtNode.js rename to node_modules/@textlint/ast-node-types/module/src/TypeofTxtNode.js diff --git a/node_modules/@textlint/ast-node-types/module/TypeofTxtNode.js.map b/node_modules/@textlint/ast-node-types/module/src/TypeofTxtNode.js.map similarity index 52% rename from node_modules/@textlint/ast-node-types/module/TypeofTxtNode.js.map rename to node_modules/@textlint/ast-node-types/module/src/TypeofTxtNode.js.map index 8c7eff29..84b4403d 100644 --- a/node_modules/@textlint/ast-node-types/module/TypeofTxtNode.js.map +++ b/node_modules/@textlint/ast-node-types/module/src/TypeofTxtNode.js.map @@ -1 +1 @@ -{"version":3,"file":"TypeofTxtNode.js","sourceRoot":"","sources":["../src/TypeofTxtNode.ts"],"names":[],"mappings":""} \ No newline at end of file +{"version":3,"file":"TypeofTxtNode.js","sourceRoot":"","sources":["../../src/TypeofTxtNode.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@textlint/ast-node-types/module/index.d.ts b/node_modules/@textlint/ast-node-types/module/src/index.d.ts similarity index 100% rename from node_modules/@textlint/ast-node-types/module/index.d.ts rename to node_modules/@textlint/ast-node-types/module/src/index.d.ts diff --git a/node_modules/@textlint/ast-node-types/module/index.js b/node_modules/@textlint/ast-node-types/module/src/index.js similarity index 100% rename from node_modules/@textlint/ast-node-types/module/index.js rename to node_modules/@textlint/ast-node-types/module/src/index.js diff --git a/node_modules/@textlint/ast-node-types/module/src/index.js.map b/node_modules/@textlint/ast-node-types/module/src/index.js.map new file mode 100644 index 00000000..0b4844a5 --- /dev/null +++ b/node_modules/@textlint/ast-node-types/module/src/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,iBAAiB;AACjB,YAAY,CAAC;AASb,MAAM,CAAN,IAAY,YAgDX;AAhDD,WAAY,YAAY;IACpB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,yCAAyB,CAAA;IACzB,kDAAkC,CAAA;IAClC,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,uCAAuB,CAAA;IACvB,gDAAgC,CAAA;IAChC,iDAAiC,CAAA;IACjC,0DAA0C,CAAA;IAC1C,mCAAmB,CAAA;IACnB,4CAA4B,CAAA;IAC5B;;OAEG;IACH,6CAA6B,CAAA;IAC7B;;OAEG;IACH,sDAAsC,CAAA;IACtC,SAAS;IACT,2BAAW,CAAA;IACX,oCAAoB,CAAA;IACpB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,qCAAqB,CAAA;IACrB,8CAA8B,CAAA;IAC9B,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;IAC1B,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,+BAAe,CAAA;IACf,wCAAwB,CAAA;IACxB,6BAAa,CAAA;IACb,sCAAsB,CAAA;IACtB,iCAAiB,CAAA;IACjB,0CAA0B,CAAA;AAC9B,CAAC,EAhDW,YAAY,KAAZ,YAAY,QAgDvB"} \ No newline at end of file diff --git a/node_modules/@textlint/ast-node-types/package.json b/node_modules/@textlint/ast-node-types/package.json index 336c0093..b87d1ebc 100644 --- a/node_modules/@textlint/ast-node-types/package.json +++ b/node_modules/@textlint/ast-node-types/package.json @@ -1,6 +1,6 @@ { "name": "@textlint/ast-node-types", - "version": "4.4.2", + "version": "12.0.0", "description": "textlint AST node type definition.", "keywords": [ "textlint" @@ -11,28 +11,29 @@ }, "license": "MIT", "author": "azu", - "main": "./lib/index.js", - "module": "./module/index.js", - "types": "./lib/index.d.ts", + "main": "./lib/src/index.js", + "module": "./module/src/index.js", + "types": "./lib/src/index.d.ts", "files": [ "bin/", "lib/", "module/", - "src/" + "src/", + "!*.tsbuildinfo" ], "scripts": { "build": "tsc -b && tsc -b tsconfig.module.json", - "clean": "rimraf lib/ module/ tsconfig.tsbuildinfo tsconfig.module.tsbuildinfo", + "clean": "rimraf lib/ module/", "prepublish": "npm run build", "test": "mocha \"test/**/*.{js,ts}\"" }, "devDependencies": { "cross-env": "^7.0.3", - "mocha": "^8.3.2", + "mocha": "^8.4.0", "rimraf": "^3.0.2", "ts-node": "^9.1.1", "ts-node-test-register": "^9.0.1", - "typescript": "~4.0.2" + "typescript": "~4.2.4" }, - "gitHead": "5c40c1429736a0ed86bea07a39cb0eba31be11d4" + "gitHead": "8b4fc20f5fb4d8e581bdcecff510d83e7701faa9" } diff --git a/node_modules/@textlint/markdown-to-ast/CHANGELOG.md b/node_modules/@textlint/markdown-to-ast/CHANGELOG.md deleted file mode 100644 index d409b564..00000000 --- a/node_modules/@textlint/markdown-to-ast/CHANGELOG.md +++ /dev/null @@ -1,561 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. -See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
- -## [6.3.4](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.3.3...@textlint/markdown-to-ast@6.3.4) (2021-03-21) - -**Note:** Version bump only for package @textlint/markdown-to-ast - - - - - - -## [6.3.3](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.3.2...@textlint/markdown-to-ast@6.3.3) (2021-03-19) - - -### Chores - -* **deps:** update dependency [@types](https://github.com/types)/mocha to v8.2.1 ([da2d6e7](https://github.com/textlint/textlint/commit/da2d6e7)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.27 ([c1c0b86](https://github.com/textlint/textlint/commit/c1c0b86)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.28 ([136e255](https://github.com/textlint/textlint/commit/136e255)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.29 ([74af03b](https://github.com/textlint/textlint/commit/74af03b)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.30 ([4872821](https://github.com/textlint/textlint/commit/4872821)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.31 ([95821ad](https://github.com/textlint/textlint/commit/95821ad)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.32 ([0b0a384](https://github.com/textlint/textlint/commit/0b0a384)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.33 ([a05e2d9](https://github.com/textlint/textlint/commit/a05e2d9)) -* **deps:** update dependency mocha to ^8.3.0 ([0464adb](https://github.com/textlint/textlint/commit/0464adb)) -* **deps:** update dependency mocha to ^8.3.1 ([cc509ed](https://github.com/textlint/textlint/commit/cc509ed)) -* **deps:** update patch updates ([183eb8d](https://github.com/textlint/textlint/commit/183eb8d)) - - - - - - -## [6.3.2](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.3.1...@textlint/markdown-to-ast@6.3.2) (2021-02-06) - - -### Chores - -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.25 ([cc5c800](https://github.com/textlint/textlint/commit/cc5c800)) - - - - - - -## [6.3.1](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.5...@textlint/markdown-to-ast@6.3.1) (2021-01-22) - - -### Bug Fixes - -* **@textlint/markdown-to-ast:** fix [@ts-expect-error](https://github.com/ts-expect-error) issue ([d47f3ec](https://github.com/textlint/textlint/commit/d47f3ec)) -* **ast-node-types:** deprecated "ReferenceDef" type ([#701](https://github.com/textlint/textlint/issues/701)) ([a088520](https://github.com/textlint/textlint/commit/a088520)) -* **deps:** update dependency debug to ^4.2.0 ([6db0ba9](https://github.com/textlint/textlint/commit/6db0ba9)) -* **deps:** update dependency debug to ^4.3.0 ([9dea96a](https://github.com/textlint/textlint/commit/9dea96a)) -* fix tsconfig ([a722a6f](https://github.com/textlint/textlint/commit/a722a6f)) - - -### Chores - -* **deps:** update dependency [@types](https://github.com/types)/mocha to v8.0.4 ([fd60f20](https://github.com/textlint/textlint/commit/fd60f20)) -* **deps:** update dependency [@types](https://github.com/types)/mocha to v8.2.0 ([5ced72d](https://github.com/textlint/textlint/commit/5ced72d)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.11.2 ([6ac372a](https://github.com/textlint/textlint/commit/6ac372a)) -* **deps:** update dependency [@types](https://github.com/types)/node to 
^14.11.5 ([b0e73f5](https://github.com/textlint/textlint/commit/b0e73f5)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.11.8 ([33679af](https://github.com/textlint/textlint/commit/33679af)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.10 ([96ba315](https://github.com/textlint/textlint/commit/96ba315)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.12 ([f62f2e3](https://github.com/textlint/textlint/commit/f62f2e3)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.13 ([608afd4](https://github.com/textlint/textlint/commit/608afd4)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.14 ([8417f8a](https://github.com/textlint/textlint/commit/8417f8a)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.16 ([7209803](https://github.com/textlint/textlint/commit/7209803)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.17 ([9ec5481](https://github.com/textlint/textlint/commit/9ec5481)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.19 ([bd050c9](https://github.com/textlint/textlint/commit/bd050c9)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.21 ([464d58a](https://github.com/textlint/textlint/commit/464d58a)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.22 ([5e5d214](https://github.com/textlint/textlint/commit/5e5d214)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.5 ([1ed7006](https://github.com/textlint/textlint/commit/1ed7006)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.6 ([507a41b](https://github.com/textlint/textlint/commit/507a41b)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.7 ([64f97eb](https://github.com/textlint/textlint/commit/64f97eb)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.8 ([8de7d16](https://github.com/textlint/textlint/commit/8de7d16)) -* **deps:** update minor updates ([7ef0be6](https://github.com/textlint/textlint/commit/7ef0be6)) -* **deps:** update minor updates ([2f3dcb6](https://github.com/textlint/textlint/commit/2f3dcb6)) -* **deps:** update minor updates ([7a53517](https://github.com/textlint/textlint/commit/7a53517)) -* **deps:** update minor updates ([#712](https://github.com/textlint/textlint/issues/712)) ([8c42a19](https://github.com/textlint/textlint/commit/8c42a19)) -* **deps:** update patch updates ([24fe2a9](https://github.com/textlint/textlint/commit/24fe2a9)) -* **deps:** update patch updates ([3d9660b](https://github.com/textlint/textlint/commit/3d9660b)) -* **deps:** update patch updates ([fe2ad4f](https://github.com/textlint/textlint/commit/fe2ad4f)) -* **deps:** update patch updates ([e438ff2](https://github.com/textlint/textlint/commit/e438ff2)) -* **deps:** update patch updates ([9df50df](https://github.com/textlint/textlint/commit/9df50df)) -* **deps:** update patch updates ([4f4c206](https://github.com/textlint/textlint/commit/4f4c206)) -* **deps:** update patch updates ([1a0e41f](https://github.com/textlint/textlint/commit/1a0e41f)) -* **deps:** update patch updates ([9157dda](https://github.com/textlint/textlint/commit/9157dda)) -* **deps:** update TypeScript deps ([#705](https://github.com/textlint/textlint/issues/705)) 
([1baa72a](https://github.com/textlint/textlint/commit/1baa72a)) -* use [@monorepo-utils](https://github.com/monorepo-utils)/workspaces-to-typescript-project-references ([#699](https://github.com/textlint/textlint/issues/699)) ([eff1943](https://github.com/textlint/textlint/commit/eff1943)) - - -### Features - -* **source-code-fixer:** add `[@textlint](https://github.com/textlint)/source-code-fixer` ([#736](https://github.com/textlint/textlint/issues/736)) ([bf7235a](https://github.com/textlint/textlint/commit/bf7235a)) - - - - - - -# [6.3.0](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.5...@textlint/markdown-to-ast@6.3.0) (2021-01-22) - - -### Bug Fixes - -* **@textlint/markdown-to-ast:** fix [@ts-expect-error](https://github.com/ts-expect-error) issue ([d47f3ec](https://github.com/textlint/textlint/commit/d47f3ec)) -* **ast-node-types:** deprecated "ReferenceDef" type ([#701](https://github.com/textlint/textlint/issues/701)) ([a088520](https://github.com/textlint/textlint/commit/a088520)) -* **deps:** update dependency debug to ^4.2.0 ([6db0ba9](https://github.com/textlint/textlint/commit/6db0ba9)) -* **deps:** update dependency debug to ^4.3.0 ([9dea96a](https://github.com/textlint/textlint/commit/9dea96a)) -* fix tsconfig ([a722a6f](https://github.com/textlint/textlint/commit/a722a6f)) - - -### Chores - -* **deps:** update dependency [@types](https://github.com/types)/mocha to v8.0.4 ([fd60f20](https://github.com/textlint/textlint/commit/fd60f20)) -* **deps:** update dependency [@types](https://github.com/types)/mocha to v8.2.0 ([5ced72d](https://github.com/textlint/textlint/commit/5ced72d)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.11.2 ([6ac372a](https://github.com/textlint/textlint/commit/6ac372a)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.11.5 ([b0e73f5](https://github.com/textlint/textlint/commit/b0e73f5)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.11.8 ([33679af](https://github.com/textlint/textlint/commit/33679af)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.10 ([96ba315](https://github.com/textlint/textlint/commit/96ba315)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.12 ([f62f2e3](https://github.com/textlint/textlint/commit/f62f2e3)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.13 ([608afd4](https://github.com/textlint/textlint/commit/608afd4)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.14 ([8417f8a](https://github.com/textlint/textlint/commit/8417f8a)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.16 ([7209803](https://github.com/textlint/textlint/commit/7209803)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.17 ([9ec5481](https://github.com/textlint/textlint/commit/9ec5481)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.19 ([bd050c9](https://github.com/textlint/textlint/commit/bd050c9)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.21 ([464d58a](https://github.com/textlint/textlint/commit/464d58a)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.22 ([5e5d214](https://github.com/textlint/textlint/commit/5e5d214)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.5 
([1ed7006](https://github.com/textlint/textlint/commit/1ed7006)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.6 ([507a41b](https://github.com/textlint/textlint/commit/507a41b)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.7 ([64f97eb](https://github.com/textlint/textlint/commit/64f97eb)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.8 ([8de7d16](https://github.com/textlint/textlint/commit/8de7d16)) -* **deps:** update minor updates ([7ef0be6](https://github.com/textlint/textlint/commit/7ef0be6)) -* **deps:** update minor updates ([2f3dcb6](https://github.com/textlint/textlint/commit/2f3dcb6)) -* **deps:** update minor updates ([7a53517](https://github.com/textlint/textlint/commit/7a53517)) -* **deps:** update minor updates ([#712](https://github.com/textlint/textlint/issues/712)) ([8c42a19](https://github.com/textlint/textlint/commit/8c42a19)) -* **deps:** update patch updates ([24fe2a9](https://github.com/textlint/textlint/commit/24fe2a9)) -* **deps:** update patch updates ([3d9660b](https://github.com/textlint/textlint/commit/3d9660b)) -* **deps:** update patch updates ([fe2ad4f](https://github.com/textlint/textlint/commit/fe2ad4f)) -* **deps:** update patch updates ([e438ff2](https://github.com/textlint/textlint/commit/e438ff2)) -* **deps:** update patch updates ([9df50df](https://github.com/textlint/textlint/commit/9df50df)) -* **deps:** update patch updates ([4f4c206](https://github.com/textlint/textlint/commit/4f4c206)) -* **deps:** update patch updates ([1a0e41f](https://github.com/textlint/textlint/commit/1a0e41f)) -* **deps:** update patch updates ([9157dda](https://github.com/textlint/textlint/commit/9157dda)) -* **deps:** update TypeScript deps ([#705](https://github.com/textlint/textlint/issues/705)) ([1baa72a](https://github.com/textlint/textlint/commit/1baa72a)) -* use [@monorepo-utils](https://github.com/monorepo-utils)/workspaces-to-typescript-project-references ([#699](https://github.com/textlint/textlint/issues/699)) ([eff1943](https://github.com/textlint/textlint/commit/eff1943)) - - -### Features - -* **source-code-fixer:** add `[@textlint](https://github.com/textlint)/source-code-fixer` ([#736](https://github.com/textlint/textlint/issues/736)) ([bf7235a](https://github.com/textlint/textlint/commit/bf7235a)) - - - - - - -## [6.2.6](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.5...@textlint/markdown-to-ast@6.2.6) (2020-12-22) - - -### Bug Fixes - -* **@textlint/markdown-to-ast:** fix [@ts-expect-error](https://github.com/ts-expect-error) issue ([d47f3ec](https://github.com/textlint/textlint/commit/d47f3ec)) -* **ast-node-types:** deprecated "ReferenceDef" type ([#701](https://github.com/textlint/textlint/issues/701)) ([a088520](https://github.com/textlint/textlint/commit/a088520)) -* **deps:** update dependency debug to ^4.2.0 ([6db0ba9](https://github.com/textlint/textlint/commit/6db0ba9)) -* **deps:** update dependency debug to ^4.3.0 ([9dea96a](https://github.com/textlint/textlint/commit/9dea96a)) -* fix tsconfig ([a722a6f](https://github.com/textlint/textlint/commit/a722a6f)) - - -### Chores - -* **deps:** update dependency [@types](https://github.com/types)/mocha to v8.0.4 ([fd60f20](https://github.com/textlint/textlint/commit/fd60f20)) -* **deps:** update dependency [@types](https://github.com/types)/mocha to v8.2.0 ([5ced72d](https://github.com/textlint/textlint/commit/5ced72d)) -* **deps:** update dependency 
[@types](https://github.com/types)/node to ^14.11.2 ([6ac372a](https://github.com/textlint/textlint/commit/6ac372a)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.11.5 ([b0e73f5](https://github.com/textlint/textlint/commit/b0e73f5)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.11.8 ([33679af](https://github.com/textlint/textlint/commit/33679af)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.10 ([96ba315](https://github.com/textlint/textlint/commit/96ba315)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.12 ([f62f2e3](https://github.com/textlint/textlint/commit/f62f2e3)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.13 ([608afd4](https://github.com/textlint/textlint/commit/608afd4)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.14 ([8417f8a](https://github.com/textlint/textlint/commit/8417f8a)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.5 ([1ed7006](https://github.com/textlint/textlint/commit/1ed7006)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.6 ([507a41b](https://github.com/textlint/textlint/commit/507a41b)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.7 ([64f97eb](https://github.com/textlint/textlint/commit/64f97eb)) -* **deps:** update dependency [@types](https://github.com/types)/node to ^14.14.8 ([8de7d16](https://github.com/textlint/textlint/commit/8de7d16)) -* **deps:** update minor updates ([7ef0be6](https://github.com/textlint/textlint/commit/7ef0be6)) -* **deps:** update minor updates ([2f3dcb6](https://github.com/textlint/textlint/commit/2f3dcb6)) -* **deps:** update minor updates ([7a53517](https://github.com/textlint/textlint/commit/7a53517)) -* **deps:** update minor updates ([#712](https://github.com/textlint/textlint/issues/712)) ([8c42a19](https://github.com/textlint/textlint/commit/8c42a19)) -* **deps:** update patch updates ([3d9660b](https://github.com/textlint/textlint/commit/3d9660b)) -* **deps:** update patch updates ([fe2ad4f](https://github.com/textlint/textlint/commit/fe2ad4f)) -* **deps:** update patch updates ([e438ff2](https://github.com/textlint/textlint/commit/e438ff2)) -* **deps:** update patch updates ([9df50df](https://github.com/textlint/textlint/commit/9df50df)) -* **deps:** update patch updates ([4f4c206](https://github.com/textlint/textlint/commit/4f4c206)) -* **deps:** update patch updates ([1a0e41f](https://github.com/textlint/textlint/commit/1a0e41f)) -* **deps:** update patch updates ([9157dda](https://github.com/textlint/textlint/commit/9157dda)) -* **deps:** update TypeScript deps ([#705](https://github.com/textlint/textlint/issues/705)) ([1baa72a](https://github.com/textlint/textlint/commit/1baa72a)) -* use [@monorepo-utils](https://github.com/monorepo-utils)/workspaces-to-typescript-project-references ([#699](https://github.com/textlint/textlint/issues/699)) ([eff1943](https://github.com/textlint/textlint/commit/eff1943)) - - - - - - -## [6.2.5](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.4...@textlint/markdown-to-ast@6.2.5) (2020-07-24) - - -### Bug Fixes - -* improve "module" supports ([5ba5182](https://github.com/textlint/textlint/commit/5ba5182)) - - - - - - -## [6.2.4](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.3...@textlint/markdown-to-ast@6.2.4) (2020-07-24) - - 
-### Bug Fixes - -* "clean" command should remove tsconfig.module.tsbuildinfo ([76ac72a](https://github.com/textlint/textlint/commit/76ac72a)) - - - - - - -## [6.2.3](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.2...@textlint/markdown-to-ast@6.2.3) (2020-07-24) - - -### Bug Fixes - -* **@textlint/kernel:** fix export only type definition ([1234930](https://github.com/textlint/textlint/commit/1234930)) - - - - - - -## [6.2.2](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.1...@textlint/markdown-to-ast@6.2.2) (2020-07-24) - - -### Bug Fixes - -* **@textlint/markdown-to-ast:** fix "module" field ([b577531](https://github.com/textlint/textlint/commit/b577531)) - - -### Tests - -* **@textlint/markdown-to-ast:** fix import ([03a5fdd](https://github.com/textlint/textlint/commit/03a5fdd)) - - - - - - -## [6.2.1](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.2.0...@textlint/markdown-to-ast@6.2.1) (2020-07-24) - - -### Bug Fixes - -* include module ([2de05f7](https://github.com/textlint/textlint/commit/2de05f7)) - - - - - - -# [6.2.0](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.7...@textlint/markdown-to-ast@6.2.0) (2020-07-24) - - -### Chores - -* **deps:** update devDepencies ([#667](https://github.com/textlint/textlint/issues/667)) ([0503af6](https://github.com/textlint/textlint/commit/0503af6)) - - -### Code Refactoring - -* **markdown-to-ast:** Convert to TypScript ([#671](https://github.com/textlint/textlint/issues/671)) ([b879a09](https://github.com/textlint/textlint/commit/b879a09)) - - -### Tests - -* migrate mocha.opts to .mocharc.json ([#682](https://github.com/textlint/textlint/issues/682)) ([332ae5e](https://github.com/textlint/textlint/commit/332ae5e)) - - - - - - -## [6.1.7](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.6...@textlint/markdown-to-ast@6.1.7) (2020-02-07) - -**Note:** Version bump only for package @textlint/markdown-to-ast - - - - - - -## [6.1.6](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.5...@textlint/markdown-to-ast@6.1.6) (2019-10-14) - -**Note:** Version bump only for package @textlint/markdown-to-ast - - - - - - -## [6.1.5](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.3...@textlint/markdown-to-ast@6.1.5) (2019-07-20) - -**Note:** Version bump only for package @textlint/markdown-to-ast - - - - - - -## [6.1.4](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.3...@textlint/markdown-to-ast@6.1.4) (2019-07-13) - -**Note:** Version bump only for package @textlint/markdown-to-ast - - - - - - -## [6.1.3](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.2...@textlint/markdown-to-ast@6.1.3) (2019-04-30) - - -### Chores - -* **deps:** update deps && devDeps ([a19463b](https://github.com/textlint/textlint/commit/a19463b)) - - - - - - -## [6.1.2](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.1...@textlint/markdown-to-ast@6.1.2) (2019-01-03) - -**Note:** Version bump only for package @textlint/markdown-to-ast - - - - - - -## [6.1.1](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.1.0...@textlint/markdown-to-ast@6.1.1) (2019-01-03) - - -### Code Refactoring - -* **markdown-to-ast:** remove unused deps" ([3a5dc5d](https://github.com/textlint/textlint/commit/3a5dc5d)) - - - - - - -# 
[6.1.0](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.0.9...@textlint/markdown-to-ast@6.1.0) (2019-01-01) - - -### Chores - -* **deps:** update TypeScript deps ([3ea7fb0](https://github.com/textlint/textlint/commit/3ea7fb0)) - - -### Styles - -* **eslint:** apply eslint to all files ([6a9573f](https://github.com/textlint/textlint/commit/6a9573f)) -* **prettier:** format style by prettier ([19a2901](https://github.com/textlint/textlint/commit/19a2901)) - - - - - -## [6.0.9](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.0.8...@textlint/markdown-to-ast@6.0.9) (2018-07-22) - - -### Chores - -* **deps:** update mocha ([5df8af4](https://github.com/textlint/textlint/commit/5df8af4)) - - -### Code Refactoring - -* **typescript:** update to TypeScript 2.8 ([f7b2b08](https://github.com/textlint/textlint/commit/f7b2b08)) - - - - - -## [6.0.8](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.0.7...@textlint/markdown-to-ast@6.0.8) (2018-04-02) - - -### Bug Fixes - -* **markdown-to-ast:** enable yaml frontmatter parse by default ([121c62f](https://github.com/textlint/textlint/commit/121c62f)) - - - - - -## [6.0.7](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.0.6...@textlint/markdown-to-ast@6.0.7) (2018-04-02) - - -### Bug Fixes - -* **markdown-to-ast:** upgrade remark@^9.0.0 ([8a70c0f](https://github.com/textlint/textlint/commit/8a70c0f)) - - -### Chores - -* **markdown-to-ast:** update fixtures ([ac61a54](https://github.com/textlint/textlint/commit/ac61a54)) - - -### Tests - -* **markdown-to-ast:** add failure test case ([87f218d](https://github.com/textlint/textlint/commit/87f218d)) - - - - - -## [6.0.6](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.0.5...@textlint/markdown-to-ast@6.0.6) (2018-03-25) - - -### Chores - -* format ([d8f44db](https://github.com/textlint/textlint/commit/d8f44db)) - - - - - -## [6.0.5](https://github.com/textlint/textlint/compare/@textlint/markdown-to-ast@6.0.4...@textlint/markdown-to-ast@6.0.5) (2018-01-27) - - -### Code Refactoring - -* **ast-traverse:** update usage of [@textlint](https://github.com/textlint)/ast-traverse ([133ab5a](https://github.com/textlint/textlint/commit/133ab5a)) - - - - - -## 6.0.4 (2018-01-18) - - -### Bug Fixes - -* **markdown-to-ast:** add "publishConfig" ([929b575](https://github.com/textlint/textlint/commit/929b575)) - - - - - -## [6.0.3](https://github.com/textlint/textlint/compare/markdown-to-ast@6.0.2...markdown-to-ast@6.0.3) (2018-01-12) - - - - -**Note:** Version bump only for package markdown-to-ast - - -## [6.0.2](https://github.com/textlint/textlint/compare/markdown-to-ast@6.0.1...markdown-to-ast@6.0.2) (2017-12-31) - - - - -**Note:** Version bump only for package markdown-to-ast - - -## [6.0.1](https://github.com/textlint/textlint/compare/markdown-to-ast@6.0.0...markdown-to-ast@6.0.1) (2017-12-25) - - - - -**Note:** Version bump only for package markdown-to-ast - - -# [6.0.0](https://github.com/textlint/textlint/compare/markdown-to-ast@6.0.0-next.0...markdown-to-ast@6.0.0) (2017-12-18) - - - - -**Note:** Version bump only for package markdown-to-ast - - -# [6.0.0-next.0](https://github.com/textlint/textlint/compare/markdown-to-ast@5.0.0...markdown-to-ast@6.0.0-next.0) (2017-12-15) - - - - -**Note:** Version bump only for package markdown-to-ast - - -# 4.0.0 (2017-05-06) - - -### Bug Fixes - -* **example:** fix path ([3047d10](https://github.com/textlint/textlint/commit/3047d10)) -* **example:** update 
([a8e4941](https://github.com/textlint/textlint/commit/a8e4941)) -* **npm:** move debug module to dependency ([eef12f0](https://github.com/textlint/textlint/commit/eef12f0)) -* **npm:** move dev to dependecies ([8e7cd32](https://github.com/textlint/textlint/commit/8e7cd32)) -* **parse:** use thematicBreak as a map key ([b9fe7d1](https://github.com/textlint/textlint/commit/b9fe7d1)) -* **readme:** fix outdated link ([5697807](https://github.com/textlint/textlint/commit/5697807)), closes [#9](https://github.com/textlint/textlint/issues/9) -* **remark:** fix usage of remark ([8702d24](https://github.com/textlint/textlint/commit/8702d24)) -* **test:** fix test on Windows ([956c6d1](https://github.com/textlint/textlint/commit/956c6d1)) -* **test:** rename test case ([821c650](https://github.com/textlint/textlint/commit/821c650)) -* **test:** update to mdast latest ([9eee76d](https://github.com/textlint/textlint/commit/9eee76d)) -* **travis:** fix path to shell-script ([13c0174](https://github.com/textlint/textlint/commit/13c0174)) - - -### Features - -* **ast:** enable SetextHeader test ([f980d41](https://github.com/textlint/textlint/commit/f980d41)) -* **ast:** implement Break node ([fb41948](https://github.com/textlint/textlint/commit/fb41948)) -* **ast:** implement HorizontalRule node ([a01084c](https://github.com/textlint/textlint/commit/a01084c)) -* **ast:** implement Html node ([3b94068](https://github.com/textlint/textlint/commit/3b94068)) -* **ast:** implement some node ([f6bbd54](https://github.com/textlint/textlint/commit/f6bbd54)) -* **ast:** re-implement Code ([832cbd6](https://github.com/textlint/textlint/commit/832cbd6)) -* **ast:** re-implement CodeBlock ([902a6c8](https://github.com/textlint/textlint/commit/902a6c8)) -* **ast:** re-implement Emphasis node ([6faf2f3](https://github.com/textlint/textlint/commit/6faf2f3)) -* **ast:** re-implement Strong node ([307c037](https://github.com/textlint/textlint/commit/307c037)) -* **demo:** add title ([174ac1b](https://github.com/textlint/textlint/commit/174ac1b)) -* **markdown:** update remark to 4.x ([f44a326](https://github.com/textlint/textlint/commit/f44a326)) -* **markdown-to-ast:** update remark[@7](https://github.com/7) (#278) ([197d5d4](https://github.com/textlint/textlint/commit/197d5d4)) -* **parser:** add `loc` object to Document type node ([dcc7e2b](https://github.com/textlint/textlint/commit/dcc7e2b)) -* **parser:** add workaround for Header ([e79ab46](https://github.com/textlint/textlint/commit/e79ab46)) -* **parser:** correct `range` of `CodeBlock` ([583b1d7](https://github.com/textlint/textlint/commit/583b1d7)) -* **parser:** correct `raw` property of `BlockQuote` ([a38bdf9](https://github.com/textlint/textlint/commit/a38bdf9)) -* **parser:** correct `raw` property of `Code` ([ef4535f](https://github.com/textlint/textlint/commit/ef4535f)) -* **parser:** correct `raw` property of `Emph` ([6da8e88](https://github.com/textlint/textlint/commit/6da8e88)) -* **parser:** correct `raw` property of `Image` ([27f46d0](https://github.com/textlint/textlint/commit/27f46d0)) -* **parser:** correct `raw` property of `List` type ([59f1723](https://github.com/textlint/textlint/commit/59f1723)) -* **parser:** correct `raw` property of `Strong` ([2265122](https://github.com/textlint/textlint/commit/2265122)) -* **parser:** correct `raw` property of LiteItem ([8414d50](https://github.com/textlint/textlint/commit/8414d50)) -* **parser:** correct `start_column` of each nodes. 
([408e97b](https://github.com/textlint/textlint/commit/408e97b)) -* **parser:** implement Header of `raw` ([740d11a](https://github.com/textlint/textlint/commit/740d11a)) -* **parser:** remove un-used properties ([f72fde8](https://github.com/textlint/textlint/commit/f72fde8)) -* **parser:** update to commonmark 0.15 ([f31eaa3](https://github.com/textlint/textlint/commit/f31eaa3)) -* **travis:** add npm run build ([69eaa48](https://github.com/textlint/textlint/commit/69eaa48)) -* **travis:** add travis badge ([7121865](https://github.com/textlint/textlint/commit/7121865)) -* **travis:** auto deploy to gh-page ([a686595](https://github.com/textlint/textlint/commit/a686595)) - - -### BREAKING CHANGES - -* **markdown-to-ast:** some parse result is changed. - -It is correct, but we think that is a breacking change. -This change have possibilities that the user update patch-version textlint and fail test. - -close https://github.com/textlint/markdown-to-ast/issues/14 diff --git a/node_modules/@textlint/markdown-to-ast/lib/index.js.map b/node_modules/@textlint/markdown-to-ast/lib/index.js.map deleted file mode 100644 index d3c9dce7..00000000 --- a/node_modules/@textlint/markdown-to-ast/lib/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,gBAAgB;AAChB,YAAY,CAAC;;;;;;AACb,qEAA0D;AAC1D,2DAAiE;AAaxC,uFAbhB,6BAAY,OAaU;AAZ/B,sDAAgC;AAChC,wEAAiD;AACjD,gDAA2B;AAC3B,aAAa;AACb,oDAA8B;AAC9B,aAAa;AACb,8DAAuC;AACvC,aAAa;AACb,0EAA6C;AAC7C,IAAM,KAAK,GAAG,eAAM,CAAC,2BAA2B,CAAC,CAAC;AAClD,IAAM,MAAM,GAAG,iBAAO,EAAE,CAAC,GAAG,CAAC,sBAAW,CAAC,CAAC,GAAG,CAAC,4BAAW,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;AAIrE;;;;GAIG;AACH,SAAgB,KAAK,CAAoB,IAAY;IACjD,IAAM,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IAC/B,IAAM,GAAG,GAAG,IAAI,2BAAgB,CAAC,IAAI,CAAC,CAAC;IACvC,kBAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,IAAa;QACzC,2CAA2C;QAC3C,IAAI,IAAI,CAAC,OAAO,EAAE;YACd,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAM,YAAY,GAAG,+BAAS,CAAC,IAAI,CAAC,IAA8B,CAAC,CAAC;gBACpE,IAAI,CAAC,YAAY,EAAE;oBACf,KAAK,CAAC,oBAAkB,YAAY,sBAAiB,IAAI,CAAC,IAAM,CAAC,CAAC;iBACrE;qBAAM;oBACH,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;iBAC5B;aACJ;YACD,uCAAuC;YACvC,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACf,IAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;gBAC/B,IAAM,mBAAmB,GAAG;oBACxB,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;oBACpF,GAAG,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;iBACjF,CAAC;gBACF,IAAM,KAAK,GAAG,GAAG,CAAC,eAAe,CAAC,mBAAmB,CAAC,CAAC;gBACvD,IAAI,CAAC,GAAG,GAAG,mBAAmB,CAAC;gBAC/B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;gBACnB,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;gBAC1C,6DAA6D;gBAC7D,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,UAAU,EAAE;oBACpC,UAAU,EAAE,KAAK;oBACjB,YAAY,EAAE,KAAK;oBACnB,QAAQ,EAAE,KAAK;oBACf,KAAK,EAAE,QAAQ;iBAClB,CAAC,CAAC;aACN;SACJ;IACL,CAAC,CAAC,CAAC;IACH,OAAO,GAAG,CAAC;AACf,CAAC;AApCD,sBAoCC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.js.map b/node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.js.map deleted file mode 100644 index 3a411c3e..00000000 --- a/node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"markdown-syntax-map.js","sourceRoot":"","sources":["../../src/mapping/markdown-syntax-map.ts"],"names":[],"mappings":"AAAA,gBAAgB;AAChB,YAAY,CAAC;;;AACb,2DAAwD;AAE3C,QAAA,SAAS,GAAG;IACrB,IAAI,EAAE,6BAAY,CAAC,QAAQ;IAC3B,SAAS,EAAE,6BAAY,CAAC,SAAS;IACjC,UAAU,EAAE,6BAAY,CAAC,UAAU;IACnC,QAAQ,EAAE,6BAAY,CAAC,QAAQ;IAC/B,IAAI,EAAE,6BAAY,CAAC,IAAI;IACvB,MAAM,EAAE,QAAQ;IAChB,OAAO,EAAE,6BAAY,CAAC,MAAM;IAC5B,IAAI,EAAE,6BAAY,CAAC,SAAS;IAC5B,SAAS,EAAE,6BAAY,CAAC,SAAS;IACjC,aAAa,EAAE,6BAAY,CAAC,cAAc;IAC1C,eAAe;IACf,IAAI,EAAE,6BAAY,CAAC,GAAG;IACtB,KAAK,EAAE,6BAAY,CAAC,KAAK;IACzB,QAAQ,EAAE,6BAAY,CAAC,QAAQ;IAC/B,MAAM,EAAE,6BAAY,CAAC,MAAM;IAC3B,IAAI,EAAE,6BAAY,CAAC,IAAI;IACvB,IAAI,EAAE,6BAAY,CAAC,IAAI;IACvB,KAAK,EAAE,6BAAY,CAAC,KAAK;IACzB,UAAU,EAAE,6BAAY,CAAC,IAAI;IAC7B,MAAM,EAAE,6BAAY,CAAC,MAAM;IAC3B,6BAA6B;IAC7B,oDAAoD;IACpD,IAAI,EAAE,MAAM;IACZ,KAAK,EAAE,OAAO;IACd,QAAQ,EAAE,UAAU;IACpB,SAAS,EAAE,WAAW;IACtB,aAAa,EAAE,eAAe;IAC9B,cAAc,EAAE,gBAAgB;IAChC,UAAU,EAAE,YAAY;IACxB;;OAEG;IACH,YAAY,EAAE,6BAAY,CAAC,YAAY;CAC1C,CAAC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/lib/index.d.ts b/node_modules/@textlint/markdown-to-ast/lib/src/index.d.ts similarity index 100% rename from node_modules/@textlint/markdown-to-ast/lib/index.d.ts rename to node_modules/@textlint/markdown-to-ast/lib/src/index.d.ts diff --git a/node_modules/@textlint/markdown-to-ast/lib/index.js b/node_modules/@textlint/markdown-to-ast/lib/src/index.js similarity index 51% rename from node_modules/@textlint/markdown-to-ast/lib/index.js rename to node_modules/@textlint/markdown-to-ast/lib/src/index.js index b066feba..1158d499 100644 --- a/node_modules/@textlint/markdown-to-ast/lib/index.js +++ b/node_modules/@textlint/markdown-to-ast/lib/src/index.js @@ -1,39 +1,38 @@ -// LICENSE : MIT "use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.parse = exports.Syntax = void 0; -var markdown_syntax_map_1 = require("./mapping/markdown-syntax-map"); -var ast_node_types_1 = require("@textlint/ast-node-types"); +const markdown_syntax_map_1 = require("./mapping/markdown-syntax-map"); +const ast_node_types_1 = require("@textlint/ast-node-types"); Object.defineProperty(exports, "Syntax", { enumerable: true, get: function () { return ast_node_types_1.ASTNodeTypes; } }); -var traverse_1 = __importDefault(require("traverse")); -var structured_source_1 = __importDefault(require("structured-source")); -var debug_1 = __importDefault(require("debug")); -// @ts-ignore -var unified_1 = __importDefault(require("unified")); -// @ts-ignore -var remark_parse_1 = __importDefault(require("remark-parse")); -// @ts-ignore -var remark_frontmatter_1 = __importDefault(require("remark-frontmatter")); -var debug = debug_1.default("@textlint/markdown-to-ast"); -var remark = unified_1.default().use(remark_parse_1.default).use(remark_frontmatter_1.default, ["yaml"]); +const traverse_1 = __importDefault(require("traverse")); +const debug_1 = __importDefault(require("debug")); +const parse_markdown_1 = require("./parse-markdown"); +const debug = debug_1.default("@textlint/markdown-to-ast"); /** * parse markdown text and return ast mapped location info. 
* @param {string} text * @returns {TxtNode} */ function parse(text) { - var ast = remark.parse(text); - var src = new structured_source_1.default(text); + // remark-parse's AST does not consider BOM + // AST's position does not +1 by BOM + // So, just trim BOM and parse it for `raw` property + // textlint's SourceCode also take same approach - trim BOM and check the position + // This means that the loading side need to consider BOM position - for example fs.readFile and text slice script. + // https://github.com/micromark/micromark/blob/0f19c1ac25964872a160d8b536878b125ddfe393/lib/preprocess.mjs#L29-L31 + const hasBOM = text.charCodeAt(0) === 0xfeff; + const textWithoutBOM = hasBOM ? text.slice(1) : text; + const ast = parse_markdown_1.parseMarkdown(textWithoutBOM); traverse_1.default(ast).forEach(function (node) { // eslint-disable-next-line no-invalid-this if (this.notLeaf) { if (node.type) { - var replacedType = markdown_syntax_map_1.SyntaxMap[node.type]; + const replacedType = markdown_syntax_map_1.SyntaxMap[node.type]; if (!replacedType) { - debug("replacedType : " + replacedType + " , node.type: " + node.type); + debug(`replacedType : ${replacedType} , node.type: ${node.type}`); } else { node.type = replacedType; @@ -41,16 +40,16 @@ function parse(text) { } // map `range`, `loc` and `raw` to node if (node.position) { - var position = node.position; - var positionCompensated = { + const position = node.position; + const positionCompensated = { start: { line: position.start.line, column: Math.max(position.start.column - 1, 0) }, end: { line: position.end.line, column: Math.max(position.end.column - 1, 0) } }; - var range = src.locationToRange(positionCompensated); + const range = [position.start.offset, position.end.offset]; node.loc = positionCompensated; node.range = range; - node.raw = text.slice(range[0], range[1]); - // Compatible for https://github.com/wooorm/unist, but hidden + node.raw = textWithoutBOM.slice(range[0], range[1]); + // Compatible for https://github.com/syntax-tree/unist, but it is hidden Object.defineProperty(node, "position", { enumerable: false, configurable: false, diff --git a/node_modules/@textlint/markdown-to-ast/lib/src/index.js.map b/node_modules/@textlint/markdown-to-ast/lib/src/index.js.map new file mode 100644 index 00000000..52eb535e --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/lib/src/index.js.map @@ -0,0 +1 @@ 
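The hunk above drops structured-source and instead trims a leading BOM before parsing, then reads ranges straight from the unist offsets. A minimal sketch of that BOM adjustment, assuming a hypothetical `stripBOM` helper that is not part of the package:

```ts
// Minimal sketch of the BOM handling in the hunk above; `stripBOM` is an
// illustrative helper name, not part of @textlint/markdown-to-ast.
const BOM = 0xfeff;

function stripBOM(text: string): string {
    // micromark/remark skip a leading BOM, so node offsets are counted against
    // the text without it; slicing `raw` from the untrimmed string would be
    // shifted by one character.
    return text.charCodeAt(0) === BOM ? text.slice(1) : text;
}

// "\uFEFF# title": offset 0 in the parsed tree points at "#", which is index 1
// of the original string but index 0 after stripping the BOM.
console.log(stripBOM("\uFEFF# title")); // "# title"
```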
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,uEAA0D;AAC1D,6DAAiE;AAOxC,uFAPhB,6BAAY,OAOU;AAN/B,wDAAgC;AAChC,kDAA2B;AAC3B,qDAAiD;AAEjD,MAAM,KAAK,GAAG,eAAM,CAAC,2BAA2B,CAAC,CAAC;AAIlD;;;;GAIG;AACH,SAAgB,KAAK,CAAoB,IAAY;IACjD,2CAA2C;IAC3C,oCAAoC;IACpC,oDAAoD;IACpD,kFAAkF;IAClF,kHAAkH;IAClH,kHAAkH;IAClH,MAAM,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC;IAC7C,MAAM,cAAc,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACrD,MAAM,GAAG,GAAG,8BAAa,CAAC,cAAc,CAAC,CAAC;IAC1C,kBAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,IAAa;QACzC,2CAA2C;QAC3C,IAAI,IAAI,CAAC,OAAO,EAAE;YACd,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,MAAM,YAAY,GAAG,+BAAS,CAAC,IAAI,CAAC,IAA8B,CAAC,CAAC;gBACpE,IAAI,CAAC,YAAY,EAAE;oBACf,KAAK,CAAC,kBAAkB,YAAY,iBAAiB,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;iBACrE;qBAAM;oBACH,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;iBAC5B;aACJ;YACD,uCAAuC;YACvC,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACf,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;gBAC/B,MAAM,mBAAmB,GAAG;oBACxB,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;oBACpF,GAAG,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;iBACjF,CAAC;gBACF,MAAM,KAAK,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAqB,CAAC;gBAC/E,IAAI,CAAC,GAAG,GAAG,mBAAmB,CAAC;gBAC/B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;gBACnB,IAAI,CAAC,GAAG,GAAG,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;gBACpD,wEAAwE;gBACxE,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,UAAU,EAAE;oBACpC,UAAU,EAAE,KAAK;oBACjB,YAAY,EAAE,KAAK;oBACnB,QAAQ,EAAE,KAAK;oBACf,KAAK,EAAE,QAAQ;iBAClB,CAAC,CAAC;aACN;SACJ;IACL,CAAC,CAAC,CAAC;IACH,OAAO,GAAQ,CAAC;AACpB,CAAC;AA3CD,sBA2CC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.d.ts b/node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.d.ts similarity index 96% rename from node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.d.ts rename to node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.d.ts index 8daecf86..be7a4996 100644 --- a/node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.d.ts +++ b/node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.d.ts @@ -25,6 +25,7 @@ export declare const SyntaxMap: { tableCell: string; linkReference: string; imageReference: string; + footnoteReference: string; definition: string; /** * @deprecated diff --git a/node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.js b/node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.js similarity index 93% rename from node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.js rename to node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.js index 07749ae6..28128faf 100644 --- a/node_modules/@textlint/markdown-to-ast/lib/mapping/markdown-syntax-map.js +++ b/node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.js @@ -2,7 +2,7 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.SyntaxMap = void 0; -var ast_node_types_1 = require("@textlint/ast-node-types"); +const ast_node_types_1 = require("@textlint/ast-node-types"); exports.SyntaxMap = { root: ast_node_types_1.ASTNodeTypes.Document, paragraph: ast_node_types_1.ASTNodeTypes.Paragraph, @@ -32,6 
+32,7 @@ exports.SyntaxMap = { tableCell: "TableCell", linkReference: "LinkReference", imageReference: "ImageReference", + footnoteReference: "FootnoteReference", definition: "Definition", /** * @deprecated diff --git a/node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.js.map b/node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.js.map new file mode 100644 index 00000000..11137534 --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/lib/src/mapping/markdown-syntax-map.js.map @@ -0,0 +1 @@ +{"version":3,"file":"markdown-syntax-map.js","sourceRoot":"","sources":["../../../src/mapping/markdown-syntax-map.ts"],"names":[],"mappings":"AAAA,gBAAgB;AAChB,YAAY,CAAC;;;AACb,6DAAwD;AAE3C,QAAA,SAAS,GAAG;IACrB,IAAI,EAAE,6BAAY,CAAC,QAAQ;IAC3B,SAAS,EAAE,6BAAY,CAAC,SAAS;IACjC,UAAU,EAAE,6BAAY,CAAC,UAAU;IACnC,QAAQ,EAAE,6BAAY,CAAC,QAAQ;IAC/B,IAAI,EAAE,6BAAY,CAAC,IAAI;IACvB,MAAM,EAAE,QAAQ;IAChB,OAAO,EAAE,6BAAY,CAAC,MAAM;IAC5B,IAAI,EAAE,6BAAY,CAAC,SAAS;IAC5B,SAAS,EAAE,6BAAY,CAAC,SAAS;IACjC,aAAa,EAAE,6BAAY,CAAC,cAAc;IAC1C,eAAe;IACf,IAAI,EAAE,6BAAY,CAAC,GAAG;IACtB,KAAK,EAAE,6BAAY,CAAC,KAAK;IACzB,QAAQ,EAAE,6BAAY,CAAC,QAAQ;IAC/B,MAAM,EAAE,6BAAY,CAAC,MAAM;IAC3B,IAAI,EAAE,6BAAY,CAAC,IAAI;IACvB,IAAI,EAAE,6BAAY,CAAC,IAAI;IACvB,KAAK,EAAE,6BAAY,CAAC,KAAK;IACzB,UAAU,EAAE,6BAAY,CAAC,IAAI;IAC7B,MAAM,EAAE,6BAAY,CAAC,MAAM;IAC3B,6BAA6B;IAC7B,oDAAoD;IACpD,IAAI,EAAE,MAAM;IACZ,KAAK,EAAE,OAAO;IACd,QAAQ,EAAE,UAAU;IACpB,SAAS,EAAE,WAAW;IACtB,aAAa,EAAE,eAAe;IAC9B,cAAc,EAAE,gBAAgB;IAChC,iBAAiB,EAAE,mBAAmB;IACtC,UAAU,EAAE,YAAY;IACxB;;OAEG;IACH,YAAY,EAAE,6BAAY,CAAC,YAAY;CAC1C,CAAC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.d.ts b/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.d.ts new file mode 100644 index 00000000..c1c44d77 --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.d.ts @@ -0,0 +1 @@ +export declare const parseMarkdown: (text: string) => import("unist").Node; diff --git a/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js b/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js new file mode 100644 index 00000000..03f2ffe6 --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js @@ -0,0 +1,26 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseMarkdown = void 0; +const unified_1 = __importDefault(require("unified")); +// @ts-ignore +const from_markdown_1 = __importDefault(require("mdast-util-gfm-autolink-literal/from-markdown")); +// FIXME: Disable auto link literal transforms that break AST node +// https://github.com/remarkjs/remark-gfm/issues/16 +// Need to override before import gfm plugin +from_markdown_1.default.transforms = []; +// Load plugins +const remark_gfm_1 = __importDefault(require("remark-gfm")); +const remark_parse_1 = __importDefault(require("remark-parse")); +const remark_frontmatter_1 = __importDefault(require("remark-frontmatter")); +const remark_footnotes_1 = __importDefault(require("remark-footnotes")); +const remark = unified_1.default().use(remark_parse_1.default).use(remark_frontmatter_1.default, ["yaml"]).use(remark_gfm_1.default).use(remark_footnotes_1.default, { + inlineNotes: true +}); +const parseMarkdown = (text) => { + return remark.parse(text); +}; +exports.parseMarkdown = parseMarkdown; +//# sourceMappingURL=parse-markdown.js.map \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js.map b/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js.map new file mode 100644 index 00000000..fdecf22f --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/lib/src/parse-markdown.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parse-markdown.js","sourceRoot":"","sources":["../../src/parse-markdown.ts"],"names":[],"mappings":";;;;;;AAAA,sDAA8B;AAC9B,aAAa;AACb,kGAA4E;AAC5E,kEAAkE;AAClE,mDAAmD;AACnD,4CAA4C;AAC5C,uBAAe,CAAC,UAAU,GAAG,EAAE,CAAC;AAChC,eAAe;AACf,4DAAmC;AACnC,gEAAuC;AACvC,4EAA6C;AAC7C,wEAAyC;AAEzC,MAAM,MAAM,GAAG,iBAAO,EAAE,CAAC,GAAG,CAAC,sBAAW,CAAC,CAAC,GAAG,CAAC,4BAAW,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,oBAAS,CAAC,CAAC,GAAG,CAAC,0BAAS,EAAE;IAC/F,WAAW,EAAE,IAAI;CACpB,CAAC,CAAC;AACI,MAAM,aAAa,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC,CAAC;AAFW,QAAA,aAAa,iBAExB"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/module/index.js.map b/node_modules/@textlint/markdown-to-ast/module/index.js.map deleted file mode 100644 index 889b6e7a..00000000 --- a/node_modules/@textlint/markdown-to-ast/module/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,gBAAgB;AAChB,YAAY,CAAC;AACb,OAAO,EAAE,SAAS,EAAE,MAAM,+BAA+B,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAW,MAAM,0BAA0B,CAAC;AACjE,OAAO,QAAQ,MAAM,UAAU,CAAC;AAChC,OAAO,gBAAgB,MAAM,mBAAmB,CAAC;AACjD,OAAO,MAAM,MAAM,OAAO,CAAC;AAC3B,aAAa;AACb,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,aAAa;AACb,OAAO,WAAW,MAAM,cAAc,CAAC;AACvC,aAAa;AACb,OAAO,WAAW,MAAM,oBAAoB,CAAC;AAC7C,IAAM,KAAK,GAAG,MAAM,CAAC,2BAA2B,CAAC,CAAC;AAClD,IAAM,MAAM,GAAG,OAAO,EAAE,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;AAErE,OAAO,EAAE,YAAY,IAAI,MAAM,EAAE,CAAC;AAElC;;;;GAIG;AACH,MAAM,UAAU,KAAK,CAAoB,IAAY;IACjD,IAAM,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IAC/B,IAAM,GAAG,GAAG,IAAI,gBAAgB,CAAC,IAAI,CAAC,CAAC;IACvC,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,IAAa;QACzC,2CAA2C;QAC3C,IAAI,IAAI,CAAC,OAAO,EAAE;YACd,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAM,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,IAA8B,CAAC,CAAC;gBACpE,IAAI,CAAC,YAAY,EAAE;oBACf,KAAK,CAAC,oBAAkB,YAAY,sBAAiB,IAAI,CAAC,IAAM,CAAC,CAAC;iBACrE;qBAAM;oBACH,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;iBAC5B;aACJ;YACD,uCAAuC;YACvC,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACf,IAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;gBAC/B,IAAM,mBAAmB,GAAG;oBACxB,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;oBACpF,GAAG,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;iBACjF,CAAC;gBACF,IAAM,KAAK,GAAG,GAAG,CAAC,eAAe,CAAC,mBAAmB,CAAC,CAAC;gBACvD,IAAI,CAAC,GAAG,GAAG,mBAAmB,CAAC;gBAC/B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;gBACnB,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;gBAC1C,6DAA6D;gBAC7D,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,UAAU,EAAE;oBACpC,UAAU,EAAE,KAAK;oBACjB,YAAY,EAAE,KAAK;oBACnB,QAAQ,EAAE,KAAK;oBACf,KAAK,EAAE,QAAQ;iBAClB,CAAC,CAAC;aACN;SACJ;IACL,CAAC,CAAC,CAAC;IACH,OAAO,GAAG,CAAC;AACf,CAAC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.js.map b/node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.js.map deleted file mode 100644 index 52925735..00000000 --- a/node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"markdown-syntax-map.js","sourceRoot":"","sources":["../../src/mapping/markdown-syntax-map.ts"],"names":[],"mappings":"AAAA,gBAAgB;AAChB,YAAY,CAAC;AACb,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAExD,MAAM,CAAC,IAAM,SAAS,GAAG;IACrB,IAAI,EAAE,YAAY,CAAC,QAAQ;IAC3B,SAAS,EAAE,YAAY,CAAC,SAAS;IACjC,UAAU,EAAE,YAAY,CAAC,UAAU;IACnC,QAAQ,EAAE,YAAY,CAAC,QAAQ;IAC/B,IAAI,EAAE,YAAY,CAAC,IAAI;IACvB,MAAM,EAAE,QAAQ;IAChB,OAAO,EAAE,YAAY,CAAC,MAAM;IAC5B,IAAI,EAAE,YAAY,CAAC,SAAS;IAC5B,SAAS,EAAE,YAAY,CAAC,SAAS;IACjC,aAAa,EAAE,YAAY,CAAC,cAAc;IAC1C,eAAe;IACf,IAAI,EAAE,YAAY,CAAC,GAAG;IACtB,KAAK,EAAE,YAAY,CAAC,KAAK;IACzB,QAAQ,EAAE,YAAY,CAAC,QAAQ;IAC/B,MAAM,EAAE,YAAY,CAAC,MAAM;IAC3B,IAAI,EAAE,YAAY,CAAC,IAAI;IACvB,IAAI,EAAE,YAAY,CAAC,IAAI;IACvB,KAAK,EAAE,YAAY,CAAC,KAAK;IACzB,UAAU,EAAE,YAAY,CAAC,IAAI;IAC7B,MAAM,EAAE,YAAY,CAAC,MAAM;IAC3B,6BAA6B;IAC7B,oDAAoD;IACpD,IAAI,EAAE,MAAM;IACZ,KAAK,EAAE,OAAO;IACd,QAAQ,EAAE,UAAU;IACpB,SAAS,EAAE,WAAW;IACtB,aAAa,EAAE,eAAe;IAC9B,cAAc,EAAE,gBAAgB;IAChC,UAAU,EAAE,YAAY;IACxB;;OAEG;IACH,YAAY,EAAE,YAAY,CAAC,YAAY;CAC1C,CAAC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/module/index.d.ts 
b/node_modules/@textlint/markdown-to-ast/module/src/index.d.ts similarity index 100% rename from node_modules/@textlint/markdown-to-ast/module/index.d.ts rename to node_modules/@textlint/markdown-to-ast/module/src/index.d.ts diff --git a/node_modules/@textlint/markdown-to-ast/module/index.js b/node_modules/@textlint/markdown-to-ast/module/src/index.js similarity index 53% rename from node_modules/@textlint/markdown-to-ast/module/index.js rename to node_modules/@textlint/markdown-to-ast/module/src/index.js index ba6c83ea..3ff40037 100644 --- a/node_modules/@textlint/markdown-to-ast/module/index.js +++ b/node_modules/@textlint/markdown-to-ast/module/src/index.js @@ -1,18 +1,9 @@ -// LICENSE : MIT -"use strict"; import { SyntaxMap } from "./mapping/markdown-syntax-map"; import { ASTNodeTypes } from "@textlint/ast-node-types"; import traverse from "traverse"; -import StructuredSource from "structured-source"; import debug0 from "debug"; -// @ts-ignore -import unified from "unified"; -// @ts-ignore -import remarkParse from "remark-parse"; -// @ts-ignore -import frontmatter from "remark-frontmatter"; -var debug = debug0("@textlint/markdown-to-ast"); -var remark = unified().use(remarkParse).use(frontmatter, ["yaml"]); +import { parseMarkdown } from "./parse-markdown"; +const debug = debug0("@textlint/markdown-to-ast"); export { ASTNodeTypes as Syntax }; /** * parse markdown text and return ast mapped location info. @@ -20,15 +11,22 @@ export { ASTNodeTypes as Syntax }; * @returns {TxtNode} */ export function parse(text) { - var ast = remark.parse(text); - var src = new StructuredSource(text); + // remark-parse's AST does not consider BOM + // AST's position does not +1 by BOM + // So, just trim BOM and parse it for `raw` property + // textlint's SourceCode also take same approach - trim BOM and check the position + // This means that the loading side need to consider BOM position - for example fs.readFile and text slice script. + // https://github.com/micromark/micromark/blob/0f19c1ac25964872a160d8b536878b125ddfe393/lib/preprocess.mjs#L29-L31 + const hasBOM = text.charCodeAt(0) === 0xfeff; + const textWithoutBOM = hasBOM ? 
text.slice(1) : text; + const ast = parseMarkdown(textWithoutBOM); traverse(ast).forEach(function (node) { // eslint-disable-next-line no-invalid-this if (this.notLeaf) { if (node.type) { - var replacedType = SyntaxMap[node.type]; + const replacedType = SyntaxMap[node.type]; if (!replacedType) { - debug("replacedType : " + replacedType + " , node.type: " + node.type); + debug(`replacedType : ${replacedType} , node.type: ${node.type}`); } else { node.type = replacedType; @@ -36,16 +34,16 @@ export function parse(text) { } // map `range`, `loc` and `raw` to node if (node.position) { - var position = node.position; - var positionCompensated = { + const position = node.position; + const positionCompensated = { start: { line: position.start.line, column: Math.max(position.start.column - 1, 0) }, end: { line: position.end.line, column: Math.max(position.end.column - 1, 0) } }; - var range = src.locationToRange(positionCompensated); + const range = [position.start.offset, position.end.offset]; node.loc = positionCompensated; node.range = range; - node.raw = text.slice(range[0], range[1]); - // Compatible for https://github.com/wooorm/unist, but hidden + node.raw = textWithoutBOM.slice(range[0], range[1]); + // Compatible for https://github.com/syntax-tree/unist, but it is hidden Object.defineProperty(node, "position", { enumerable: false, configurable: false, diff --git a/node_modules/@textlint/markdown-to-ast/module/src/index.js.map b/node_modules/@textlint/markdown-to-ast/module/src/index.js.map new file mode 100644 index 00000000..53ce2517 --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/module/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,+BAA+B,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAW,MAAM,0BAA0B,CAAC;AACjE,OAAO,QAAQ,MAAM,UAAU,CAAC;AAChC,OAAO,MAAM,MAAM,OAAO,CAAC;AAC3B,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AAEjD,MAAM,KAAK,GAAG,MAAM,CAAC,2BAA2B,CAAC,CAAC;AAElD,OAAO,EAAE,YAAY,IAAI,MAAM,EAAE,CAAC;AAElC;;;;GAIG;AACH,MAAM,UAAU,KAAK,CAAoB,IAAY;IACjD,2CAA2C;IAC3C,oCAAoC;IACpC,oDAAoD;IACpD,kFAAkF;IAClF,kHAAkH;IAClH,kHAAkH;IAClH,MAAM,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC;IAC7C,MAAM,cAAc,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACrD,MAAM,GAAG,GAAG,aAAa,CAAC,cAAc,CAAC,CAAC;IAC1C,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,IAAa;QACzC,2CAA2C;QAC3C,IAAI,IAAI,CAAC,OAAO,EAAE;YACd,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,IAA8B,CAAC,CAAC;gBACpE,IAAI,CAAC,YAAY,EAAE;oBACf,KAAK,CAAC,kBAAkB,YAAY,iBAAiB,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;iBACrE;qBAAM;oBACH,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;iBAC5B;aACJ;YACD,uCAAuC;YACvC,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACf,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;gBAC/B,MAAM,mBAAmB,GAAG;oBACxB,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;oBACpF,GAAG,EAAE,EAAE,IAAI,EAAE,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;iBACjF,CAAC;gBACF,MAAM,KAAK,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAqB,CAAC;gBAC/E,IAAI,CAAC,GAAG,GAAG,mBAAmB,CAAC;gBAC/B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;gBACnB,IAAI,CAAC,GAAG,GAAG,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;gBACpD,wEAAwE;gBACxE,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,UAAU,EAAE;oBACpC,UAAU,EAAE,KAAK;oBACjB,YAAY,EAAE,KAAK;oBACnB,QAAQ,EAAE,KAAK;oBACf,KAAK,EAAE,QAAQ;iBAClB,CAAC,CAA
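Both the CJS and the module build now derive `loc`, `range` and `raw` directly from the unist `position`, instead of round-tripping through `StructuredSource#locationToRange`. A small sketch of that mapping, using an illustrative `toTxtLocation` name and a hand-written heading position:

```ts
// Sketch of the position mapping in the hunks above: mdast columns are
// 1-based, textlint's loc is 0-based, and range/raw come straight from the
// unist offsets. `toTxtLocation` is an illustrative name only.
interface UnistPoint { line: number; column: number; offset: number; }
interface UnistPosition { start: UnistPoint; end: UnistPoint; }

function toTxtLocation(position: UnistPosition, sourceWithoutBOM: string) {
    const loc = {
        start: { line: position.start.line, column: Math.max(position.start.column - 1, 0) },
        end: { line: position.end.line, column: Math.max(position.end.column - 1, 0) }
    };
    const range: [number, number] = [position.start.offset, position.end.offset];
    return { loc, range, raw: sourceWithoutBOM.slice(range[0], range[1]) };
}

// "# title" parsed as a heading spanning the whole first line:
console.log(toTxtLocation(
    { start: { line: 1, column: 1, offset: 0 }, end: { line: 1, column: 8, offset: 7 } },
    "# title\n"
));
// => loc columns 0..7, range [0, 7], raw "# title"
```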
C;aACN;SACJ;IACL,CAAC,CAAC,CAAC;IACH,OAAO,GAAQ,CAAC;AACpB,CAAC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.d.ts b/node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.d.ts similarity index 96% rename from node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.d.ts rename to node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.d.ts index 8daecf86..be7a4996 100644 --- a/node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.d.ts +++ b/node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.d.ts @@ -25,6 +25,7 @@ export declare const SyntaxMap: { tableCell: string; linkReference: string; imageReference: string; + footnoteReference: string; definition: string; /** * @deprecated diff --git a/node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.js b/node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.js similarity index 94% rename from node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.js rename to node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.js index 40295ff7..24135f46 100644 --- a/node_modules/@textlint/markdown-to-ast/module/mapping/markdown-syntax-map.js +++ b/node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.js @@ -1,7 +1,7 @@ // LICENSE : MIT "use strict"; import { ASTNodeTypes } from "@textlint/ast-node-types"; -export var SyntaxMap = { +export const SyntaxMap = { root: ASTNodeTypes.Document, paragraph: ASTNodeTypes.Paragraph, blockquote: ASTNodeTypes.BlockQuote, @@ -30,6 +30,7 @@ export var SyntaxMap = { tableCell: "TableCell", linkReference: "LinkReference", imageReference: "ImageReference", + footnoteReference: "FootnoteReference", definition: "Definition", /** * @deprecated diff --git a/node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.js.map b/node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.js.map new file mode 100644 index 00000000..20073f9a --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/module/src/mapping/markdown-syntax-map.js.map @@ -0,0 +1 @@ +{"version":3,"file":"markdown-syntax-map.js","sourceRoot":"","sources":["../../../src/mapping/markdown-syntax-map.ts"],"names":[],"mappings":"AAAA,gBAAgB;AAChB,YAAY,CAAC;AACb,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAExD,MAAM,CAAC,MAAM,SAAS,GAAG;IACrB,IAAI,EAAE,YAAY,CAAC,QAAQ;IAC3B,SAAS,EAAE,YAAY,CAAC,SAAS;IACjC,UAAU,EAAE,YAAY,CAAC,UAAU;IACnC,QAAQ,EAAE,YAAY,CAAC,QAAQ;IAC/B,IAAI,EAAE,YAAY,CAAC,IAAI;IACvB,MAAM,EAAE,QAAQ;IAChB,OAAO,EAAE,YAAY,CAAC,MAAM;IAC5B,IAAI,EAAE,YAAY,CAAC,SAAS;IAC5B,SAAS,EAAE,YAAY,CAAC,SAAS;IACjC,aAAa,EAAE,YAAY,CAAC,cAAc;IAC1C,eAAe;IACf,IAAI,EAAE,YAAY,CAAC,GAAG;IACtB,KAAK,EAAE,YAAY,CAAC,KAAK;IACzB,QAAQ,EAAE,YAAY,CAAC,QAAQ;IAC/B,MAAM,EAAE,YAAY,CAAC,MAAM;IAC3B,IAAI,EAAE,YAAY,CAAC,IAAI;IACvB,IAAI,EAAE,YAAY,CAAC,IAAI;IACvB,KAAK,EAAE,YAAY,CAAC,KAAK;IACzB,UAAU,EAAE,YAAY,CAAC,IAAI;IAC7B,MAAM,EAAE,YAAY,CAAC,MAAM;IAC3B,6BAA6B;IAC7B,oDAAoD;IACpD,IAAI,EAAE,MAAM;IACZ,KAAK,EAAE,OAAO;IACd,QAAQ,EAAE,UAAU;IACpB,SAAS,EAAE,WAAW;IACtB,aAAa,EAAE,eAAe;IAC9B,cAAc,EAAE,gBAAgB;IAChC,iBAAiB,EAAE,mBAAmB;IACtC,UAAU,EAAE,YAAY;IACxB;;OAEG;IACH,YAAY,EAAE,YAAY,CAAC,YAAY;CAC1C,CAAC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.d.ts b/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.d.ts new file mode 
100644 index 00000000..c1c44d77 --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.d.ts @@ -0,0 +1 @@ +export declare const parseMarkdown: (text: string) => import("unist").Node; diff --git a/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js b/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js new file mode 100644 index 00000000..b8fdcad8 --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js @@ -0,0 +1,19 @@ +import unified from "unified"; +// @ts-ignore +import autolinkLiteral from "mdast-util-gfm-autolink-literal/from-markdown"; +// FIXME: Disable auto link literal transforms that break AST node +// https://github.com/remarkjs/remark-gfm/issues/16 +// Need to override before import gfm plugin +autolinkLiteral.transforms = []; +// Load plugins +import remarkGfm from "remark-gfm"; +import remarkParse from "remark-parse"; +import frontmatter from "remark-frontmatter"; +import footnotes from "remark-footnotes"; +const remark = unified().use(remarkParse).use(frontmatter, ["yaml"]).use(remarkGfm).use(footnotes, { + inlineNotes: true +}); +export const parseMarkdown = (text) => { + return remark.parse(text); +}; +//# sourceMappingURL=parse-markdown.js.map \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js.map b/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js.map new file mode 100644 index 00000000..d89a5ebf --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/module/src/parse-markdown.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parse-markdown.js","sourceRoot":"","sources":["../../src/parse-markdown.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,aAAa;AACb,OAAO,eAAe,MAAM,+CAA+C,CAAC;AAC5E,kEAAkE;AAClE,mDAAmD;AACnD,4CAA4C;AAC5C,eAAe,CAAC,UAAU,GAAG,EAAE,CAAC;AAChC,eAAe;AACf,OAAO,SAAS,MAAM,YAAY,CAAC;AACnC,OAAO,WAAW,MAAM,cAAc,CAAC;AACvC,OAAO,WAAW,MAAM,oBAAoB,CAAC;AAC7C,OAAO,SAAS,MAAM,kBAAkB,CAAC;AAEzC,MAAM,MAAM,GAAG,OAAO,EAAE,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,SAAS,EAAE;IAC/F,WAAW,EAAE,IAAI;CACpB,CAAC,CAAC;AACH,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@textlint/markdown-to-ast/package.json b/node_modules/@textlint/markdown-to-ast/package.json index d2985c3c..c6b24da9 100644 --- a/node_modules/@textlint/markdown-to-ast/package.json +++ b/node_modules/@textlint/markdown-to-ast/package.json @@ -1,6 +1,6 @@ { "name": "@textlint/markdown-to-ast", - "version": "6.3.4", + "version": "12.0.0", "description": "Parse Markdown to AST with location info.", "homepage": "https://github.com/textlint/textlint/tree/master/packages/@textlint/markdown-to-ast/", "bugs": { @@ -12,9 +12,9 @@ }, "license": "MIT", "author": "azu", - "main": "lib/index.js", - "module": "./module/index.js", - "types": "lib/index.d.ts", + "main": "lib/src/index.js", + "module": "module/src/index.js", + "types": "lib/src/index.d.ts", "directories": { "test": "test/" }, @@ -22,44 +22,45 @@ "bin/", "lib/", "module/", - "src/" + "src/", + "!*.tsbuildinfo" ], "scripts": { "build": "tsc -b && tsc -b tsconfig.module.json", - "clean": "rimraf lib/ module/ tsconfig.tsbuildinfo tsconfig.module.tsbuildinfo", + "clean": "rimraf lib/ module/", "example:build": "browserify example/js/index.js -o example/app/app.js", "prepublish": "npm run --if-present build", - 
"test": "mocha \"test/**/*.{js,ts}\"", + "test": "mocha \"test/**/*.ts\"", "updateSnapshot": "npm run build && node tools/update-fixtures.js", "watch": "tsc -b --watch" }, "dependencies": { - "@textlint/ast-node-types": "^4.4.2", + "@textlint/ast-node-types": "^12.0.0", "debug": "^4.3.1", - "remark-frontmatter": "^1.3.3", - "remark-parse": "^5.0.0", - "structured-source": "^3.0.2", + "remark-footnotes": "^3.0.0", + "remark-frontmatter": "^3.0.0", + "remark-gfm": "^1.0.0", + "remark-parse": "^9.0.0", "traverse": "^0.6.6", - "unified": "^6.2.0" + "unified": "^9.2.1" }, "devDependencies": { - "@textlint/ast-tester": "^2.3.4", + "@textlint/ast-tester": "^12.0.0", "@types/mocha": "^8.2.2", - "@types/node": "^14.14.35", - "@types/structured-source": "^3.0.0", + "@types/node": "^14.17.0", "@types/traverse": "^0.6.32", "browserify": "^16.5.2", "cross-env": "^7.0.3", "mkdirp": "^1.0.4", - "mocha": "^8.3.2", + "mocha": "^8.4.0", "power-assert": "^1.6.1", "rimraf": "^3.0.2", "ts-node": "^9.1.1", "ts-node-test-register": "^9.0.1", - "typescript": "^4.0.2" + "typescript": "^4.2.4" }, "publishConfig": { "access": "public" }, - "gitHead": "07c0497b387f09e41088b7f113555aa5e5451add" + "gitHead": "8b4fc20f5fb4d8e581bdcecff510d83e7701faa9" } diff --git a/node_modules/@textlint/markdown-to-ast/src/index.ts b/node_modules/@textlint/markdown-to-ast/src/index.ts index 466ce71e..6c3781b8 100644 --- a/node_modules/@textlint/markdown-to-ast/src/index.ts +++ b/node_modules/@textlint/markdown-to-ast/src/index.ts @@ -1,18 +1,10 @@ -// LICENSE : MIT -"use strict"; import { SyntaxMap } from "./mapping/markdown-syntax-map"; import { ASTNodeTypes, TxtNode } from "@textlint/ast-node-types"; import traverse from "traverse"; -import StructuredSource from "structured-source"; import debug0 from "debug"; -// @ts-ignore -import unified from "unified"; -// @ts-ignore -import remarkParse from "remark-parse"; -// @ts-ignore -import frontmatter from "remark-frontmatter"; +import { parseMarkdown } from "./parse-markdown"; + const debug = debug0("@textlint/markdown-to-ast"); -const remark = unified().use(remarkParse).use(frontmatter, ["yaml"]); export { ASTNodeTypes as Syntax }; @@ -22,8 +14,15 @@ export { ASTNodeTypes as Syntax }; * @returns {TxtNode} */ export function parse(text: string): T { - const ast = remark.parse(text); - const src = new StructuredSource(text); + // remark-parse's AST does not consider BOM + // AST's position does not +1 by BOM + // So, just trim BOM and parse it for `raw` property + // textlint's SourceCode also take same approach - trim BOM and check the position + // This means that the loading side need to consider BOM position - for example fs.readFile and text slice script. + // https://github.com/micromark/micromark/blob/0f19c1ac25964872a160d8b536878b125ddfe393/lib/preprocess.mjs#L29-L31 + const hasBOM = text.charCodeAt(0) === 0xfeff; + const textWithoutBOM = hasBOM ? 
text.slice(1) : text; + const ast = parseMarkdown(textWithoutBOM); traverse(ast).forEach(function (node: TxtNode) { // eslint-disable-next-line no-invalid-this if (this.notLeaf) { @@ -42,11 +41,11 @@ export function parse(text: string): T { start: { line: position.start.line, column: Math.max(position.start.column - 1, 0) }, end: { line: position.end.line, column: Math.max(position.end.column - 1, 0) } }; - const range = src.locationToRange(positionCompensated); + const range = [position.start.offset, position.end.offset] as [number, number]; node.loc = positionCompensated; node.range = range; - node.raw = text.slice(range[0], range[1]); - // Compatible for https://github.com/wooorm/unist, but hidden + node.raw = textWithoutBOM.slice(range[0], range[1]); + // Compatible for https://github.com/syntax-tree/unist, but it is hidden Object.defineProperty(node, "position", { enumerable: false, configurable: false, @@ -56,5 +55,5 @@ export function parse(text: string): T { } } }); - return ast; + return ast as T; } diff --git a/node_modules/@textlint/markdown-to-ast/src/mapping/markdown-syntax-map.ts b/node_modules/@textlint/markdown-to-ast/src/mapping/markdown-syntax-map.ts index 97a5cc16..ca393e45 100644 --- a/node_modules/@textlint/markdown-to-ast/src/mapping/markdown-syntax-map.ts +++ b/node_modules/@textlint/markdown-to-ast/src/mapping/markdown-syntax-map.ts @@ -31,6 +31,7 @@ export const SyntaxMap = { tableCell: "TableCell", linkReference: "LinkReference", imageReference: "ImageReference", + footnoteReference: "FootnoteReference", // textlint@12+ definition: "Definition", /** * @deprecated diff --git a/node_modules/@textlint/markdown-to-ast/src/parse-markdown.ts b/node_modules/@textlint/markdown-to-ast/src/parse-markdown.ts new file mode 100644 index 00000000..fa379619 --- /dev/null +++ b/node_modules/@textlint/markdown-to-ast/src/parse-markdown.ts @@ -0,0 +1,19 @@ +import unified from "unified"; +// @ts-ignore +import autolinkLiteral from "mdast-util-gfm-autolink-literal/from-markdown"; +// FIXME: Disable auto link literal transforms that break AST node +// https://github.com/remarkjs/remark-gfm/issues/16 +// Need to override before import gfm plugin +autolinkLiteral.transforms = []; +// Load plugins +import remarkGfm from "remark-gfm"; +import remarkParse from "remark-parse"; +import frontmatter from "remark-frontmatter"; +import footnotes from "remark-footnotes"; + +const remark = unified().use(remarkParse).use(frontmatter, ["yaml"]).use(remarkGfm).use(footnotes, { + inlineNotes: true +}); +export const parseMarkdown = (text: string) => { + return remark.parse(text); +}; diff --git a/node_modules/@types/mdast/LICENSE b/node_modules/@types/mdast/LICENSE new file mode 100644 index 00000000..4b1ad51b --- /dev/null +++ b/node_modules/@types/mdast/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/mdast/README.md b/node_modules/@types/mdast/README.md new file mode 100644 index 00000000..5c0a337a --- /dev/null +++ b/node_modules/@types/mdast/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/mdast` + +# Summary +This package contains type definitions for Mdast (https://github.com/syntax-tree/mdast). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/mdast + +Additional Details + * Last updated: Sat, 07 Sep 2019 00:45:19 GMT + * Dependencies: @types/unist + * Global values: none + +# Credits +These definitions were written by Jun Lu . diff --git a/node_modules/@types/mdast/index.d.ts b/node_modules/@types/mdast/index.d.ts new file mode 100644 index 00000000..817fcce1 --- /dev/null +++ b/node_modules/@types/mdast/index.d.ts @@ -0,0 +1,217 @@ +// Type definitions for Mdast 3.0 +// Project: https://github.com/syntax-tree/mdast, https://github.com/wooorm/mdast +// Definitions by: Jun Lu +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 3.0 + +import { Parent as UnistParent, Literal as UnistLiteral, Node } from 'unist'; + +export type AlignType = 'left' | 'right' | 'center' | null; + +export type ReferenceType = 'shortcut' | 'collapsed' | 'full'; + +export type Content = + | TopLevelContent + | ListContent + | TableContent + | RowContent + | PhrasingContent; + +export type TopLevelContent = + | BlockContent + | FrontmatterContent + | DefinitionContent; + +export type BlockContent = + | Paragraph + | Heading + | ThematicBreak + | Blockquote + | List + | Table + | HTML + | Code; + +export type FrontmatterContent = YAML; + +export type DefinitionContent = Definition | FootnoteDefinition; + +export type ListContent = ListItem; + +export type TableContent = TableRow; + +export type RowContent = TableCell; + +export type PhrasingContent = StaticPhrasingContent | Link | LinkReference; + +export type StaticPhrasingContent = + | Text + | Emphasis + | Strong + | Delete + | HTML + | InlineCode + | Break + | Image + | ImageReference + | Footnote + | FootnoteReference; + +export interface Parent extends UnistParent { + children: Content[]; +} + +export interface Literal extends UnistLiteral { + value: string; +} + +export interface Root extends Parent { + type: 'root'; +} + +export interface Paragraph extends Parent { + type: 'paragraph'; + children: PhrasingContent[]; +} + +export interface Heading extends Parent { + type: 'heading'; + depth: 1 | 2 | 3 | 4 | 5 | 6; + children: PhrasingContent[]; +} + +export interface ThematicBreak extends Node { + type: 'thematicBreak'; +} + +export interface Blockquote extends Parent { + type: 'blockquote'; + children: BlockContent[]; +} + +export interface List extends Parent { + type: 'list'; + ordered?: boolean; + start?: number; + spread?: boolean; + children: ListContent[]; +} + +export interface ListItem extends Parent { + type: 'listItem'; + checked?: boolean; + spread?: boolean; + children: BlockContent[]; +} + +export interface Table 
extends Parent { + type: 'table'; + align?: AlignType[]; + children: TableContent[]; +} + +export interface TableRow extends Parent { + type: 'tableRow'; + children: RowContent[]; +} + +export interface TableCell extends Parent { + type: 'tableCell'; + children: PhrasingContent[]; +} + +export interface HTML extends Literal { + type: 'html'; +} + +export interface Code extends Literal { + type: 'code'; + lang?: string; + meta?: string; +} + +export interface YAML extends Literal { + type: 'yaml'; +} + +export interface Definition extends Node, Association, Resource { + type: 'definition'; +} + +export interface FootnoteDefinition extends Parent, Association { + type: 'footnoteDefinition'; + children: BlockContent[]; +} + +export interface Text extends Literal { + type: 'text'; +} + +export interface Emphasis extends Parent { + type: 'emphasis'; + children: PhrasingContent[]; +} + +export interface Strong extends Parent { + type: 'strong'; + children: PhrasingContent[]; +} + +export interface Delete extends Parent { + type: 'delete'; + children: PhrasingContent[]; +} + +export interface InlineCode extends Literal { + type: 'inlineCode'; +} + +export interface Break extends Node { + type: 'break'; +} + +export interface Link extends Parent, Resource { + type: 'link'; + children: StaticPhrasingContent[]; +} + +export interface Image extends Node, Resource, Alternative { + type: 'image'; +} + +export interface LinkReference extends Parent, Reference { + type: 'linkReference'; + children: StaticPhrasingContent[]; +} + +export interface ImageReference extends Node, Reference, Alternative { + type: 'imageReference'; +} + +export interface Footnote extends Parent { + type: 'footnote'; + children: PhrasingContent[]; +} + +export interface FootnoteReference extends Node, Association { + type: 'footnoteReference'; +} + +// Mixin +export interface Resource { + url: string; + title?: string; +} + +export interface Association { + identifier: string; + label?: string; +} + +export interface Reference extends Association { + referenceType: ReferenceType; +} + +export interface Alternative { + alt?: string; +} diff --git a/node_modules/@types/mdast/package.json b/node_modules/@types/mdast/package.json new file mode 100644 index 00000000..23e52f45 --- /dev/null +++ b/node_modules/@types/mdast/package.json @@ -0,0 +1,26 @@ +{ + "name": "@types/mdast", + "version": "3.0.3", + "description": "TypeScript definitions for Mdast", + "license": "MIT", + "contributors": [ + { + "name": "Jun Lu", + "url": "https://github.com/lujun2", + "githubUsername": "lujun2" + } + ], + "main": "", + "types": "index", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/mdast" + }, + "scripts": {}, + "dependencies": { + "@types/unist": "*" + }, + "typesPublisherContentHash": "14d7fdbd7f31ef3975bd5e967ada84235c102b1be369cba397ced8b95ebe4e57", + "typeScriptVersion": "3.0" +} \ No newline at end of file diff --git a/node_modules/@types/unist/LICENSE b/node_modules/@types/unist/LICENSE new file mode 100644 index 00000000..4b1ad51b --- /dev/null +++ b/node_modules/@types/unist/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/unist/README.md b/node_modules/@types/unist/README.md new file mode 100644 index 00000000..a15402a4 --- /dev/null +++ b/node_modules/@types/unist/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/unist` + +# Summary +This package contains type definitions for non-npm package Unist ( https://github.com/syntax-tree/unist ). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist + +Additional Details + * Last updated: Thu, 14 Feb 2019 18:10:46 GMT + * Dependencies: none + * Global values: none + +# Credits +These definitions were written by bizen241 , Jun Lu , Hernan Rajchert , Titus Wormer , Junyoung Choi . diff --git a/node_modules/@types/unist/index.d.ts b/node_modules/@types/unist/index.d.ts new file mode 100644 index 00000000..e5e052db --- /dev/null +++ b/node_modules/@types/unist/index.d.ts @@ -0,0 +1,98 @@ +// Type definitions for non-npm package Unist 2.0 +// Project: https://github.com/syntax-tree/unist +// Definitions by: bizen241 +// Jun Lu +// Hernan Rajchert +// Titus Wormer +// Junyoung Choi +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 3.0 + +/** + * Syntactic units in unist syntax trees are called nodes. + */ +export interface Node { + /** + * The variant of a node. + */ + type: string; + + /** + * Information from the ecosystem. + */ + data?: Data; + + /** + * Location of a node in a source document. + * Must not be present if a node is generated. + */ + position?: Position; + + [key: string]: unknown; +} + +/** + * Information associated by the ecosystem with the node. + * Space is guaranteed to never be specified by unist or specifications + * implementing unist. + */ +export interface Data { + [key: string]: unknown; +} + +/** + * Location of a node in a source file. + */ +export interface Position { + /** + * Place of the first character of the parsed source region. + */ + start: Point; + + /** + * Place of the first character after the parsed source region. + */ + end: Point; + + /** + * Start column at each index (plus start line) in the source region, + * for elements that span multiple lines. + */ + indent?: number[]; +} + +/** + * One place in a source file. + */ +export interface Point { + /** + * Line in a source file (1-indexed integer). + */ + line: number; + + /** + * Column in a source file (1-indexed integer). 
+ */ + column: number; + /** + * Character in a source file (0-indexed integer). + */ + offset?: number; +} + +/** + * Nodes containing other nodes. + */ +export interface Parent extends Node { + /** + * List representing the children of a node. + */ + children: Node[]; +} + +/** + * Nodes containing a value. + */ +export interface Literal extends Node { + value: unknown; +} diff --git a/node_modules/@types/unist/package.json b/node_modules/@types/unist/package.json new file mode 100644 index 00000000..78fa6281 --- /dev/null +++ b/node_modules/@types/unist/package.json @@ -0,0 +1,43 @@ +{ + "name": "@types/unist", + "version": "2.0.3", + "description": "TypeScript definitions for non-npm package Unist", + "license": "MIT", + "contributors": [ + { + "name": "bizen241", + "url": "https://github.com/bizen241", + "githubUsername": "bizen241" + }, + { + "name": "Jun Lu", + "url": "https://github.com/lujun2", + "githubUsername": "lujun2" + }, + { + "name": "Hernan Rajchert", + "url": "https://github.com/hrajchert", + "githubUsername": "hrajchert" + }, + { + "name": "Titus Wormer", + "url": "https://github.com/wooorm", + "githubUsername": "wooorm" + }, + { + "name": "Junyoung Choi", + "url": "https://github.com/rokt33r", + "githubUsername": "rokt33r" + } + ], + "main": "", + "types": "index", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "555fe20f164ccded02a3f69d8b45c8c9d2ec6fd53844a7c7858a3001c281bc9b", + "typeScriptVersion": "3.0" +} \ No newline at end of file diff --git a/node_modules/before-after-hook/README.md b/node_modules/before-after-hook/README.md index 953ed41d..1439db3c 100644 --- a/node_modules/before-after-hook/README.md +++ b/node_modules/before-after-hook/README.md @@ -62,7 +62,7 @@ The methods are executed in the following order 1. `beforeHook` 2. `fetchFromDatabase` 3. `afterHook` -4. `getData` +4. `handleData` `beforeHook` can mutate `options` before it’s passed to `fetchFromDatabase`. @@ -70,10 +70,10 @@ If an error is thrown in `beforeHook` or `fetchFromDatabase` then `errorHook` is called next. If `afterHook` throws an error then `handleGetError` is called instead -of `getData`. +of `handleData`. If `errorHook` throws an error then `handleGetError` is called next, otherwise -`afterHook` and `getData`. +`afterHook` and `handleData`. You can also use `hook.wrap` to achieve the same thing as shown above (collection example): diff --git a/node_modules/before-after-hook/package.json b/node_modules/before-after-hook/package.json index 90a4e287..25f47564 100644 --- a/node_modules/before-after-hook/package.json +++ b/node_modules/before-after-hook/package.json @@ -1,6 +1,6 @@ { "name": "before-after-hook", - "version": "2.2.1", + "version": "2.2.2", "description": "asynchronous before/error/after hooks for internal functionality", "main": "index.js", "files": [ diff --git a/node_modules/boundary/README.md b/node_modules/boundary/README.md deleted file mode 100644 index e2d48af9..00000000 --- a/node_modules/boundary/README.md +++ /dev/null @@ -1,74 +0,0 @@ -boundary -============== - - -## About - -Provides binary search functions. 
- -## Installation - -```sh -npm install boundary -``` - -## Usage - -```js -function lessThan(v1, v2) { - return v1 < v2; -} - -// boundary.upperBound(sortedArray, value, compare = lessThan); -assert(upperBound([ 0, 0, 2, 3, 4 ], -1) === 0); -assert(upperBound([ 0, 0, 2, 3, 4 ], 0) === 2); -assert(upperBound([ 0, 0, 2, 3, 4 ], 1) === 2); -assert(upperBound([ 0, 0, 2, 3, 4 ], 2) === 3); -assert(upperBound([ 0, 0, 2, 3, 4 ], 3) === 4); -assert(upperBound([ 0, 0, 2, 3, 4 ], 4) === 5); -assert(upperBound([ 0, 0, 2, 3, 4 ], 5) === 5); - -// boundary.lowerBound(sortedArray, value, compare = lessThan); -assert(lowerBound([ 0, 0, 2, 3, 4 ], -1) === 0); -assert(lowerBound([ 0, 0, 2, 3, 4 ], 0) === 0); -assert(lowerBound([ 0, 0, 2, 3, 4 ], 1) === 2); -assert(lowerBound([ 0, 0, 2, 3, 4 ], 2) === 2); -assert(lowerBound([ 0, 0, 2, 3, 4 ], 3) === 3); -assert(lowerBound([ 0, 0, 2, 3, 4 ], 4) === 4); -assert(lowerBound([ 0, 0, 2, 3, 4 ], 5) === 5); - -// boundary.binarySearch(sortedArray, value, compare = lessThan); -assert(binarySearch([ 0, 0, 2, 3, 4 ], -1) === false); -assert(binarySearch([ 0, 0, 2, 3, 4 ], 0) === true); -assert(binarySearch([ 0, 0, 2, 3, 4 ], 1) === false); -assert(binarySearch([ 0, 0, 2, 3, 4 ], 2) === true); -assert(binarySearch([ 0, 0, 2, 3, 4 ], 3) === true); -assert(binarySearch([ 0, 0, 2, 3, 4 ], 4) === true); -assert(binarySearch([ 0, 0, 2, 3, 4 ], 5) === false); -``` - -### License - -Copyright (C) 2012-2014 [Yusuke Suzuki](http://github.com/Constellation) - (twitter: [@Constellation](http://twitter.com/Constellation)) and other contributors. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/boundary/lib/index.js b/node_modules/boundary/lib/index.js deleted file mode 100644 index f4fa26cb..00000000 --- a/node_modules/boundary/lib/index.js +++ /dev/null @@ -1,83 +0,0 @@ -"use strict"; - -/* - Copyright (C) 2014 Yusuke Suzuki - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY - DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND - ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF - THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -*/ - -function compare(v1, v2) { - return v1 < v2; -} - -function upperBound(array, value, comp) { - if (comp === undefined) comp = compare; - return (function () { - var len = array.length; - var i = 0; - - while (len) { - var diff = len >>> 1; - var cursor = i + diff; - if (comp(value, array[cursor])) { - len = diff; - } else { - i = cursor + 1; - len -= diff + 1; - } - } - return i; - })(); -} - -function lowerBound(array, value, comp) { - if (comp === undefined) comp = compare; - return (function () { - var len = array.length; - var i = 0; - - while (len) { - var diff = len >>> 1; - var cursor = i + diff; - if (comp(array[cursor], value)) { - i = cursor + 1; - len -= diff + 1; - } else { - len = diff; - } - } - return i; - })(); -} - -function binarySearch(array, value, comp) { - if (comp === undefined) comp = compare; - return (function () { - var cursor = lowerBound(array, value, comp); - return cursor !== array.length && !comp(value, array[cursor]); - })(); -} - -exports.compare = compare; -exports.lowerBound = lowerBound; -exports.upperBound = upperBound; -exports.binarySearch = binarySearch; -//# 
sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbImluZGV4LmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7O0FBd0JBLFNBQVMsT0FBTyxDQUFDLEVBQUUsRUFBRSxFQUFFLEVBQUU7QUFDckIsU0FBTyxFQUFFLEdBQUcsRUFBRSxDQUFDO0NBQ2xCOztBQUVELFNBQVMsVUFBVSxDQUFDLEtBQUssRUFBRSxLQUFLLEVBQUUsSUFBSTtNQUFKLElBQUksZ0JBQUosSUFBSSxHQUFHLE9BQU87c0JBQUU7QUFDOUMsUUFBSSxHQUFHLEdBQUcsS0FBSyxDQUFDLE1BQU0sQ0FBQztBQUN2QixRQUFJLENBQUMsR0FBRyxDQUFDLENBQUM7O0FBRVYsV0FBTyxHQUFHLEVBQUU7QUFDUixVQUFJLElBQUksR0FBRyxHQUFHLEtBQUssQ0FBQyxDQUFDO0FBQ3JCLFVBQUksTUFBTSxHQUFHLENBQUMsR0FBRyxJQUFJLENBQUM7QUFDdEIsVUFBSSxJQUFJLENBQUMsS0FBSyxFQUFFLEtBQUssQ0FBQyxNQUFNLENBQUMsQ0FBQyxFQUFFO0FBQzVCLFdBQUcsR0FBRyxJQUFJLENBQUM7T0FDZCxNQUFNO0FBQ0gsU0FBQyxHQUFHLE1BQU0sR0FBRyxDQUFDLENBQUM7QUFDZixXQUFHLElBQUksSUFBSSxHQUFHLENBQUMsQ0FBQztPQUNuQjtLQUNKO0FBQ0QsV0FBTyxDQUFDLENBQUM7R0FDWjtDQUFBOztBQUVELFNBQVMsVUFBVSxDQUFDLEtBQUssRUFBRSxLQUFLLEVBQUUsSUFBSTtNQUFKLElBQUksZ0JBQUosSUFBSSxHQUFHLE9BQU87c0JBQUU7QUFDOUMsUUFBSSxHQUFHLEdBQUcsS0FBSyxDQUFDLE1BQU0sQ0FBQztBQUN2QixRQUFJLENBQUMsR0FBRyxDQUFDLENBQUM7O0FBRVYsV0FBTyxHQUFHLEVBQUU7QUFDUixVQUFJLElBQUksR0FBRyxHQUFHLEtBQUssQ0FBQyxDQUFDO0FBQ3JCLFVBQUksTUFBTSxHQUFHLENBQUMsR0FBRyxJQUFJLENBQUM7QUFDdEIsVUFBSSxJQUFJLENBQUMsS0FBSyxDQUFDLE1BQU0sQ0FBQyxFQUFFLEtBQUssQ0FBQyxFQUFFO0FBQzVCLFNBQUMsR0FBRyxNQUFNLEdBQUcsQ0FBQyxDQUFDO0FBQ2YsV0FBRyxJQUFJLElBQUksR0FBRyxDQUFDLENBQUM7T0FDbkIsTUFBTTtBQUNILFdBQUcsR0FBRyxJQUFJLENBQUM7T0FDZDtLQUNKO0FBQ0QsV0FBTyxDQUFDLENBQUM7R0FDWjtDQUFBOztBQUVELFNBQVMsWUFBWSxDQUFDLEtBQUssRUFBRSxLQUFLLEVBQUUsSUFBSTtNQUFKLElBQUksZ0JBQUosSUFBSSxHQUFHLE9BQU87c0JBQUU7QUFDaEQsUUFBSSxNQUFNLEdBQUcsVUFBVSxDQUFDLEtBQUssRUFBRSxLQUFLLEVBQUUsSUFBSSxDQUFDLENBQUM7QUFDNUMsV0FBTyxNQUFNLEtBQUssS0FBSyxDQUFDLE1BQU0sSUFBSSxDQUFDLElBQUksQ0FBQyxLQUFLLEVBQUUsS0FBSyxDQUFDLE1BQU0sQ0FBQyxDQUFDLENBQUM7R0FDakU7Q0FBQTs7UUFHRyxPQUFPLEdBQVAsT0FBTztRQUNQLFVBQVUsR0FBVixVQUFVO1FBQ1YsVUFBVSxHQUFWLFVBQVU7UUFDVixZQUFZLEdBQVosWUFBWSIsImZpbGUiOiJpbmRleC5qcyIsInNvdXJjZXNDb250ZW50IjpbIi8qXG4gIENvcHlyaWdodCAoQykgMjAxNCBZdXN1a2UgU3V6dWtpIDx1dGF0YW5lLnRlYUBnbWFpbC5jb20+XG5cbiAgUmVkaXN0cmlidXRpb24gYW5kIHVzZSBpbiBzb3VyY2UgYW5kIGJpbmFyeSBmb3Jtcywgd2l0aCBvciB3aXRob3V0XG4gIG1vZGlmaWNhdGlvbiwgYXJlIHBlcm1pdHRlZCBwcm92aWRlZCB0aGF0IHRoZSBmb2xsb3dpbmcgY29uZGl0aW9ucyBhcmUgbWV0OlxuXG4gICAgKiBSZWRpc3RyaWJ1dGlvbnMgb2Ygc291cmNlIGNvZGUgbXVzdCByZXRhaW4gdGhlIGFib3ZlIGNvcHlyaWdodFxuICAgICAgbm90aWNlLCB0aGlzIGxpc3Qgb2YgY29uZGl0aW9ucyBhbmQgdGhlIGZvbGxvd2luZyBkaXNjbGFpbWVyLlxuICAgICogUmVkaXN0cmlidXRpb25zIGluIGJpbmFyeSBmb3JtIG11c3QgcmVwcm9kdWNlIHRoZSBhYm92ZSBjb3B5cmlnaHRcbiAgICAgIG5vdGljZSwgdGhpcyBsaXN0IG9mIGNvbmRpdGlvbnMgYW5kIHRoZSBmb2xsb3dpbmcgZGlzY2xhaW1lciBpbiB0aGVcbiAgICAgIGRvY3VtZW50YXRpb24gYW5kL29yIG90aGVyIG1hdGVyaWFscyBwcm92aWRlZCB3aXRoIHRoZSBkaXN0cmlidXRpb24uXG5cbiAgVEhJUyBTT0ZUV0FSRSBJUyBQUk9WSURFRCBCWSBUSEUgQ09QWVJJR0hUIEhPTERFUlMgQU5EIENPTlRSSUJVVE9SUyBcIkFTIElTXCJcbiAgQU5EIEFOWSBFWFBSRVNTIE9SIElNUExJRUQgV0FSUkFOVElFUywgSU5DTFVESU5HLCBCVVQgTk9UIExJTUlURUQgVE8sIFRIRVxuICBJTVBMSUVEIFdBUlJBTlRJRVMgT0YgTUVSQ0hBTlRBQklMSVRZIEFORCBGSVRORVNTIEZPUiBBIFBBUlRJQ1VMQVIgUFVSUE9TRVxuICBBUkUgRElTQ0xBSU1FRC4gSU4gTk8gRVZFTlQgU0hBTEwgPENPUFlSSUdIVCBIT0xERVI+IEJFIExJQUJMRSBGT1IgQU5ZXG4gIERJUkVDVCwgSU5ESVJFQ1QsIElOQ0lERU5UQUwsIFNQRUNJQUwsIEVYRU1QTEFSWSwgT1IgQ09OU0VRVUVOVElBTCBEQU1BR0VTXG4gIChJTkNMVURJTkcsIEJVVCBOT1QgTElNSVRFRCBUTywgUFJPQ1VSRU1FTlQgT0YgU1VCU1RJVFVURSBHT09EUyBPUiBTRVJWSUNFUztcbiAgTE9TUyBPRiBVU0UsIERBVEEsIE9SIFBST0ZJVFM7IE9SIEJVU0lORVNTIElOVEVSUlVQVElPTikgSE9XRVZFUiBDQVVTRUQgQU5EXG4gIE9OIEFOWSBUSEVPUlkgT0YgTElBQklMSVRZLCBXSEVUS
EVSIElOIENPTlRSQUNULCBTVFJJQ1QgTElBQklMSVRZLCBPUiBUT1JUXG4gIChJTkNMVURJTkcgTkVHTElHRU5DRSBPUiBPVEhFUldJU0UpIEFSSVNJTkcgSU4gQU5ZIFdBWSBPVVQgT0YgVEhFIFVTRSBPRlxuICBUSElTIFNPRlRXQVJFLCBFVkVOIElGIEFEVklTRUQgT0YgVEhFIFBPU1NJQklMSVRZIE9GIFNVQ0ggREFNQUdFLlxuKi9cblxuZnVuY3Rpb24gY29tcGFyZSh2MSwgdjIpIHtcbiAgICByZXR1cm4gdjEgPCB2Mjtcbn1cblxuZnVuY3Rpb24gdXBwZXJCb3VuZChhcnJheSwgdmFsdWUsIGNvbXAgPSBjb21wYXJlKSB7XG4gICAgbGV0IGxlbiA9IGFycmF5Lmxlbmd0aDtcbiAgICBsZXQgaSA9IDA7XG5cbiAgICB3aGlsZSAobGVuKSB7XG4gICAgICAgIGxldCBkaWZmID0gbGVuID4+PiAxO1xuICAgICAgICBsZXQgY3Vyc29yID0gaSArIGRpZmY7XG4gICAgICAgIGlmIChjb21wKHZhbHVlLCBhcnJheVtjdXJzb3JdKSkge1xuICAgICAgICAgICAgbGVuID0gZGlmZjtcbiAgICAgICAgfSBlbHNlIHtcbiAgICAgICAgICAgIGkgPSBjdXJzb3IgKyAxO1xuICAgICAgICAgICAgbGVuIC09IGRpZmYgKyAxO1xuICAgICAgICB9XG4gICAgfVxuICAgIHJldHVybiBpO1xufVxuXG5mdW5jdGlvbiBsb3dlckJvdW5kKGFycmF5LCB2YWx1ZSwgY29tcCA9IGNvbXBhcmUpIHtcbiAgICBsZXQgbGVuID0gYXJyYXkubGVuZ3RoO1xuICAgIGxldCBpID0gMDtcblxuICAgIHdoaWxlIChsZW4pIHtcbiAgICAgICAgbGV0IGRpZmYgPSBsZW4gPj4+IDE7XG4gICAgICAgIGxldCBjdXJzb3IgPSBpICsgZGlmZjtcbiAgICAgICAgaWYgKGNvbXAoYXJyYXlbY3Vyc29yXSwgdmFsdWUpKSB7XG4gICAgICAgICAgICBpID0gY3Vyc29yICsgMTtcbiAgICAgICAgICAgIGxlbiAtPSBkaWZmICsgMTtcbiAgICAgICAgfSBlbHNlIHtcbiAgICAgICAgICAgIGxlbiA9IGRpZmY7XG4gICAgICAgIH1cbiAgICB9XG4gICAgcmV0dXJuIGk7XG59XG5cbmZ1bmN0aW9uIGJpbmFyeVNlYXJjaChhcnJheSwgdmFsdWUsIGNvbXAgPSBjb21wYXJlKSB7XG4gICAgbGV0IGN1cnNvciA9IGxvd2VyQm91bmQoYXJyYXksIHZhbHVlLCBjb21wKTtcbiAgICByZXR1cm4gY3Vyc29yICE9PSBhcnJheS5sZW5ndGggJiYgIWNvbXAodmFsdWUsIGFycmF5W2N1cnNvcl0pO1xufVxuXG5leHBvcnQge1xuICAgIGNvbXBhcmUsXG4gICAgbG93ZXJCb3VuZCxcbiAgICB1cHBlckJvdW5kLFxuICAgIGJpbmFyeVNlYXJjaCxcbn1cblxuLyogdmltOiBzZXQgc3c9NCB0cz00IGV0IHR3PTgwIDogKi9cbiJdLCJzb3VyY2VSb290IjoiL3NvdXJjZS8ifQ== \ No newline at end of file diff --git a/node_modules/boundary/package.json b/node_modules/boundary/package.json deleted file mode 100644 index 3734d7d5..00000000 --- a/node_modules/boundary/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "boundary", - "version": "1.0.1", - "description": "Provides boundary functions, (upper-bound and lower-bound).", - "author": "Yusuke SUZUKI", - "homepage": "https://github.com/Constellation/boundary", - "repository": { - "type": "git", - "url": "https://github.com/Constellation/boundary.git" - }, - "main": "lib/index.js", - "maintainers": [ - { - "name": "Yusuke SUZUKI", - "email": "utatane.tea@gmail.com", - "web": "http://github.com/Constellation" - } - ], - "files": [ - "lib" - ], - "scripts": { - "test": "gulp test" - }, - "dependencies": {}, - "devDependencies": { - "gulp": "^3.8.10", - "gulp-6to5": "^1.0.2", - "gulp-bump": "^0.1.11", - "gulp-espower": "^0.10.0", - "gulp-filter": "^2.0.0", - "gulp-git": "^0.5.5", - "gulp-mocha": "^2.0.0", - "gulp-sourcemaps": "^1.2.8", - "gulp-tag-version": "^1.2.1", - "power-assert": "^0.10.0" - }, - "keywords": [ - "algorithm" - ], - "bugs": { - "url": "https://github.com/Constellation/boundary/issues" - }, - "licenses": [ - { - "type": "BSD", - "url": "http://github.com/Constellation/boundary/raw/master/LICENSE.BSD" - } - ] -} diff --git a/node_modules/ccount/index.js b/node_modules/ccount/index.js new file mode 100644 index 00000000..61108527 --- /dev/null +++ b/node_modules/ccount/index.js @@ -0,0 +1,22 @@ +'use strict' + +module.exports = ccount + +function ccount(source, character) { + var value = String(source) + var count = 0 + var index + + if (typeof character !== 'string') { + throw new Error('Expected character') + } + + index = value.indexOf(character) + + while (index !== -1) { + count++ 
+ index = value.indexOf(character, index + character.length) + } + + return count +} diff --git a/node_modules/collapse-white-space/license b/node_modules/ccount/license similarity index 100% rename from node_modules/collapse-white-space/license rename to node_modules/ccount/license diff --git a/node_modules/trim-trailing-lines/package.json b/node_modules/ccount/package.json similarity index 71% rename from node_modules/trim-trailing-lines/package.json rename to node_modules/ccount/package.json index 86119839..9cac7d2e 100644 --- a/node_modules/trim-trailing-lines/package.json +++ b/node_modules/ccount/package.json @@ -1,17 +1,15 @@ { - "name": "trim-trailing-lines", - "version": "1.1.4", - "description": "Remove final line feeds from a string", + "name": "ccount", + "version": "1.1.0", + "description": "Count characters", "license": "MIT", "keywords": [ - "trim", - "final", - "line", - "newline", - "characters" + "character", + "count", + "char" ], - "repository": "wooorm/trim-trailing-lines", - "bugs": "https://github.com/wooorm/trim-trailing-lines/issues", + "repository": "wooorm/ccount", + "bugs": "https://github.com/wooorm/ccount/issues", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -36,18 +34,17 @@ }, "scripts": { "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", - "build-bundle": "browserify . -s trimTrailingLines -o trim-trailing-lines.js", - "build-mangle": "browserify . -s trimTrailingLines -p tinyify -o trim-trailing-lines.min.js", + "build-bundle": "browserify . -s ccount -o ccount.js", + "build-mangle": "browserify . -s ccount -p tinyify -o ccount.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", "test": "npm run format && npm run build && npm run test-coverage" }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] }, "prettier": { "tabWidth": 2, @@ -61,12 +58,16 @@ "prettier": true, "esnext": false, "ignores": [ - "trim-trailing-lines.js" - ] + "ccount.js" + ], + "rules": { + "unicorn/prefer-type-error": "off" + } }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" - ] + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 } } diff --git a/node_modules/ccount/readme.md b/node_modules/ccount/readme.md new file mode 100644 index 00000000..f3e41a91 --- /dev/null +++ b/node_modules/ccount/readme.md @@ -0,0 +1,68 @@ +# ccount + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] + +Count characters. + +## Install + +[npm][]: + +```sh +npm install ccount +``` + +## Use + +```js +var ccount = require('ccount') + +ccount('foo(bar(baz)', '(') // => 2 +ccount('foo(bar(baz)', ')') // => 1 +``` + +## API + +### `ccount(value, character)` + +Get the total count of `character` in `value`. + +###### Parameters + +* `value` (`string`) — Content, coerced to string +* `character` (`string`) — Single character to look for + +###### Returns + +`number` — Number of times `character` occurred in `value`. 
+ +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://img.shields.io/travis/wooorm/ccount.svg + +[build]: https://travis-ci.org/wooorm/ccount + +[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/ccount.svg + +[coverage]: https://codecov.io/github/wooorm/ccount + +[downloads-badge]: https://img.shields.io/npm/dm/ccount.svg + +[downloads]: https://www.npmjs.com/package/ccount + +[size-badge]: https://img.shields.io/bundlephobia/minzip/ccount.svg + +[size]: https://bundlephobia.com/result?p=ccount + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com diff --git a/node_modules/collapse-white-space/index.js b/node_modules/collapse-white-space/index.js deleted file mode 100644 index 93d54669..00000000 --- a/node_modules/collapse-white-space/index.js +++ /dev/null @@ -1,8 +0,0 @@ -'use strict' - -module.exports = collapse - -// `collapse(' \t\nbar \nbaz\t') // ' bar baz '` -function collapse(value) { - return String(value).replace(/\s+/g, ' ') -} diff --git a/node_modules/collapse-white-space/readme.md b/node_modules/collapse-white-space/readme.md deleted file mode 100644 index 5154c9fe..00000000 --- a/node_modules/collapse-white-space/readme.md +++ /dev/null @@ -1,58 +0,0 @@ -# collapse-white-space - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -Replace multiple whitespace characters with a single space. - -## Install - -[npm][]: - -```sh -npm install collapse-white-space -``` - -## Use - -```js -var collapse = require('collapse-white-space') - -collapse('\tfoo \n\tbar \t\r\nbaz') //=> ' foo bar baz' -``` - -## API - -### `collapse(value)` - -Replace multiple whitespace characters in value with a single space. - -## License - -[MIT][license] © [Titus Wormer][author] - - - -[build-badge]: https://img.shields.io/travis/wooorm/collapse-white-space.svg - -[build]: https://travis-ci.org/wooorm/collapse-white-space - -[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/collapse-white-space.svg - -[coverage]: https://codecov.io/github/wooorm/collapse-white-space - -[downloads-badge]: https://img.shields.io/npm/dm/collapse-white-space.svg - -[downloads]: https://www.npmjs.com/package/collapse-white-space - -[size-badge]: https://img.shields.io/bundlephobia/minzip/collapse-white-space.svg - -[size]: https://bundlephobia.com/result?p=collapse-white-space - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com diff --git a/node_modules/dom-serializer/README.md b/node_modules/dom-serializer/README.md index 7b14339e..9ef0eecc 100644 --- a/node_modules/dom-serializer/README.md +++ b/node_modules/dom-serializer/README.md @@ -1,6 +1,6 @@ # dom-serializer [![Build Status](https://travis-ci.com/cheeriojs/dom-serializer.svg?branch=master)](https://travis-ci.com/cheeriojs/dom-serializer) -Renders a DOM node or an array of DOM nodes to a string. +Renders a [domhandler](https://github.com/fb55/domhandler) DOM node or an array of domhandler DOM nodes to a string. ```js import render from "dom-serializer"; @@ -63,9 +63,9 @@ Print self-closing tags for tags without contents. 
**`default`** xmlMode -**`example`** With emptyAttrs: false: <foo></foo> +**`example`** With selfClosingTags: false: <foo></foo> -**`example`** With emptyAttrs: true: <foo /> +**`example`** With selfClosingTags: true: <foo /> --- @@ -81,4 +81,17 @@ If the value is `"foreign"`, it will try to correct mixed-case attribute names. --- +## Ecosystem + +| Name | Description | +| ------------------------------------------------------------- | ------------------------------------------------------- | +| [htmlparser2](https://github.com/fb55/htmlparser2) | Fast & forgiving HTML/XML parser | +| [domhandler](https://github.com/fb55/domhandler) | Handler for htmlparser2 that turns documents into a DOM | +| [domutils](https://github.com/fb55/domutils) | Utilities for working with domhandler's DOM | +| [css-select](https://github.com/fb55/css-select) | CSS selector engine, compatible with domhandler's DOM | +| [cheerio](https://github.com/cheeriojs/cheerio) | The jQuery API for domhandler's DOM | +| [dom-serializer](https://github.com/cheeriojs/dom-serializer) | Serializer for domhandler's DOM | + +--- + LICENSE: MIT diff --git a/node_modules/dom-serializer/lib/index.d.ts b/node_modules/dom-serializer/lib/index.d.ts index 6614d8cd..84f6d168 100644 --- a/node_modules/dom-serializer/lib/index.d.ts +++ b/node_modules/dom-serializer/lib/index.d.ts @@ -12,8 +12,8 @@ export interface DomSerializerOptions { * Print self-closing tags for tags without contents. * * @default xmlMode - * @example With emptyAttrs: false: <foo></foo> - * @example With emptyAttrs: true: <foo /> + * @example With selfClosingTags: false: <foo></foo> + * @example With selfClosingTags: true: <foo /> */ selfClosingTags?: boolean; /** @@ -39,5 +39,5 @@ export interface DomSerializerOptions { * @param node Node to be rendered. 
* @param options Changes serialization behavior */ -export default function render(node: Node | Node[], options?: DomSerializerOptions): string; +export default function render(node: Node | ArrayLike, options?: DomSerializerOptions): string; //# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/dom-serializer/lib/index.d.ts.map b/node_modules/dom-serializer/lib/index.d.ts.map index 0dd2f265..c04bdd48 100644 --- a/node_modules/dom-serializer/lib/index.d.ts.map +++ b/node_modules/dom-serializer/lib/index.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,IAAI,EAAuC,MAAM,YAAY,CAAC;AAW5E,MAAM,WAAW,oBAAoB;IACnC;;;;;;OAMG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;;;;;OAMG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;;;;OAMG;IACH,OAAO,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC9B;;;;OAIG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CAC1B;AAqED;;;;;;;GAOG;AACH,MAAM,CAAC,OAAO,UAAU,MAAM,CAC5B,IAAI,EAAE,IAAI,GAAG,IAAI,EAAE,EACnB,OAAO,GAAE,oBAAyB,GACjC,MAAM,CAYR"} \ No newline at end of file +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,IAAI,EAAuC,MAAM,YAAY,CAAC;AAW5E,MAAM,WAAW,oBAAoB;IACnC;;;;;;OAMG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;;;;;OAMG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;;;;OAMG;IACH,OAAO,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC9B;;;;OAIG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CAC1B;AAqED;;;;;;;GAOG;AACH,MAAM,CAAC,OAAO,UAAU,MAAM,CAC5B,IAAI,EAAE,IAAI,GAAG,SAAS,CAAC,IAAI,CAAC,EAC5B,OAAO,GAAE,oBAAyB,GACjC,MAAM,CAUR"} \ No newline at end of file diff --git a/node_modules/dom-serializer/lib/index.js b/node_modules/dom-serializer/lib/index.js index dff26da8..a5e1f936 100644 --- a/node_modules/dom-serializer/lib/index.js +++ b/node_modules/dom-serializer/lib/index.js @@ -109,8 +109,7 @@ var singleTag = new Set([ */ function render(node, options) { if (options === void 0) { options = {}; } - // TODO: This is a bit hacky. - var nodes = Array.isArray(node) || node.cheerio ? node : [node]; + var nodes = "length" in node ? 
node : [node]; var output = ""; for (var i = 0; i < nodes.length; i++) { output += renderNode(nodes[i], options); diff --git a/node_modules/dom-serializer/package.json b/node_modules/dom-serializer/package.json index 21f5a3a6..942931e8 100644 --- a/node_modules/dom-serializer/package.json +++ b/node_modules/dom-serializer/package.json @@ -1,7 +1,7 @@ { "name": "dom-serializer", - "version": "1.3.1", - "description": "render dom nodes to string", + "version": "1.3.2", + "description": "render domhandler DOM nodes to a string", "author": "Felix Boehm ", "sideEffects": false, "keywords": [ @@ -20,22 +20,22 @@ ], "dependencies": { "domelementtype": "^2.0.1", - "domhandler": "^4.0.0", + "domhandler": "^4.2.0", "entities": "^2.0.0" }, "devDependencies": { - "@types/jest": "^26.0.3", - "@types/node": "^14.10.1", - "@typescript-eslint/eslint-plugin": "^4.1.0", - "@typescript-eslint/parser": "^4.1.0", - "cheerio": "^1.0.0-rc.5", + "@types/jest": "^26.0.23", + "@types/node": "^15.3.0", + "@typescript-eslint/eslint-plugin": "^4.23.0", + "@typescript-eslint/parser": "^4.23.0", + "cheerio": "^1.0.0-rc.9", "coveralls": "^3.0.5", - "eslint": "^7.0.0", - "eslint-config-prettier": "^8.1.0", - "htmlparser2": "^6.0.0", + "eslint": "^7.26.0", + "eslint-config-prettier": "^8.3.0", + "htmlparser2": "^6.1.0", "jest": "^26.0.1", - "prettier": "^2.0.5", - "ts-jest": "^26.1.0", + "prettier": "^2.3.0", + "ts-jest": "^26.5.6", "typescript": "^4.0.2" }, "scripts": { diff --git a/node_modules/domutils/lib/stringify.d.ts b/node_modules/domutils/lib/stringify.d.ts index 1be81804..b5019d0e 100644 --- a/node_modules/domutils/lib/stringify.d.ts +++ b/node_modules/domutils/lib/stringify.d.ts @@ -15,10 +15,27 @@ export declare function getOuterHTML(node: Node | Node[], options?: DomSerialize */ export declare function getInnerHTML(node: Node, options?: DomSerializerOptions): string; /** - * Get a node's inner text. + * Get a node's inner text. Same as `textContent`, but inserts newlines for `
` tags. * + * @deprecated Use `textContent` instead. * @param node Node to get the inner text of. * @returns `node`'s inner text. */ export declare function getText(node: Node | Node[]): string; +/** + * Get a node's text content. + * + * @param node Node to get the text content of. + * @returns `node`'s text content. + * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Node/textContent} + */ +export declare function textContent(node: Node | Node[]): string; +/** + * Get a node's inner text. + * + * @param node Node to get the inner text of. + * @returns `node`'s inner text. + * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Node/innerText} + */ +export declare function innerText(node: Node | Node[]): string; //# sourceMappingURL=stringify.d.ts.map \ No newline at end of file diff --git a/node_modules/domutils/lib/stringify.d.ts.map b/node_modules/domutils/lib/stringify.d.ts.map index bf394788..38c77829 100644 --- a/node_modules/domutils/lib/stringify.d.ts.map +++ b/node_modules/domutils/lib/stringify.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"stringify.d.ts","sourceRoot":"","sources":["../src/stringify.ts"],"names":[],"mappings":"AAAA,OAAO,EAAuC,IAAI,EAAE,MAAM,YAAY,CAAC;AACvE,OAAmB,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAElE;;;;;GAKG;AACH,wBAAgB,YAAY,CACxB,IAAI,EAAE,IAAI,GAAG,IAAI,EAAE,EACnB,OAAO,CAAC,EAAE,oBAAoB,GAC/B,MAAM,CAER;AAED;;;;;GAKG;AACH,wBAAgB,YAAY,CACxB,IAAI,EAAE,IAAI,EACV,OAAO,CAAC,EAAE,oBAAoB,GAC/B,MAAM,CAIR;AAED;;;;;GAKG;AACH,wBAAgB,OAAO,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,EAAE,GAAG,MAAM,CAMnD"} \ No newline at end of file +{"version":3,"file":"stringify.d.ts","sourceRoot":"","sources":["../src/stringify.ts"],"names":[],"mappings":"AAAA,OAAO,EAAuC,IAAI,EAAE,MAAM,YAAY,CAAC;AACvE,OAAmB,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAGlE;;;;;GAKG;AACH,wBAAgB,YAAY,CACxB,IAAI,EAAE,IAAI,GAAG,IAAI,EAAE,EACnB,OAAO,CAAC,EAAE,oBAAoB,GAC/B,MAAM,CAER;AAED;;;;;GAKG;AACH,wBAAgB,YAAY,CACxB,IAAI,EAAE,IAAI,EACV,OAAO,CAAC,EAAE,oBAAoB,GAC/B,MAAM,CAIR;AAED;;;;;;GAMG;AACH,wBAAgB,OAAO,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,EAAE,GAAG,MAAM,CAMnD;AAED;;;;;;GAMG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,EAAE,GAAG,MAAM,CAMvD;AAED;;;;;;GAMG;AACH,wBAAgB,SAAS,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,EAAE,GAAG,MAAM,CAQrD"} \ No newline at end of file diff --git a/node_modules/domutils/lib/stringify.js b/node_modules/domutils/lib/stringify.js index d5a89b5f..a8a56d0b 100644 --- a/node_modules/domutils/lib/stringify.js +++ b/node_modules/domutils/lib/stringify.js @@ -3,9 +3,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.getText = exports.getInnerHTML = exports.getOuterHTML = void 0; +exports.innerText = exports.textContent = exports.getText = exports.getInnerHTML = exports.getOuterHTML = void 0; var domhandler_1 = require("domhandler"); var dom_serializer_1 = __importDefault(require("dom-serializer")); +var domelementtype_1 = require("domelementtype"); /** * @param node Node to get the outer HTML of. * @param options Options for serialization. @@ -29,8 +30,9 @@ function getInnerHTML(node, options) { } exports.getInnerHTML = getInnerHTML; /** - * Get a node's inner text. + * Get a node's inner text. Same as `textContent`, but inserts newlines for `
` tags. * + * @deprecated Use `textContent` instead. * @param node Node to get the inner text of. * @returns `node`'s inner text. */ @@ -46,3 +48,42 @@ function getText(node) { return ""; } exports.getText = getText; +/** + * Get a node's text content. + * + * @param node Node to get the text content of. + * @returns `node`'s text content. + * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Node/textContent} + */ +function textContent(node) { + if (Array.isArray(node)) + return node.map(textContent).join(""); + if (domhandler_1.isTag(node)) + return textContent(node.children); + if (domhandler_1.isCDATA(node)) + return textContent(node.children); + if (domhandler_1.isText(node)) + return node.data; + return ""; +} +exports.textContent = textContent; +/** + * Get a node's inner text. + * + * @param node Node to get the inner text of. + * @returns `node`'s inner text. + * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Node/innerText} + */ +function innerText(node) { + if (Array.isArray(node)) + return node.map(innerText).join(""); + if (domhandler_1.hasChildren(node) && node.type === domelementtype_1.ElementType.Tag) { + return innerText(node.children); + } + if (domhandler_1.isCDATA(node)) + return innerText(node.children); + if (domhandler_1.isText(node)) + return node.data; + return ""; +} +exports.innerText = innerText; diff --git a/node_modules/domutils/package.json b/node_modules/domutils/package.json index 0cac965c..dedd619a 100644 --- a/node_modules/domutils/package.json +++ b/node_modules/domutils/package.json @@ -1,6 +1,6 @@ { "name": "domutils", - "version": "2.6.0", + "version": "2.7.0", "description": "Utilities for working with htmlparser2's dom", "author": "Felix Boehm ", "funding": { @@ -42,17 +42,17 @@ }, "devDependencies": { "@types/jest": "^26.0.0", - "@types/node": "^14.0.5", + "@types/node": "^15.0.1", "@typescript-eslint/eslint-plugin": "^4.1.0", "@typescript-eslint/parser": "^4.1.0", "coveralls": "^3.0.5", "eslint": "^7.0.0", "eslint-config-prettier": "^8.1.0", - "eslint-plugin-jsdoc": "^32.0.2", + "eslint-plugin-jsdoc": "^35.1.0", "htmlparser2": "~6.1.0", - "jest": "^26.0.1", + "jest": "^27.0.1", "prettier": "^2.0.5", - "ts-jest": "^26.0.0", + "ts-jest": "^27.0.1", "typedoc": "^0.20.5", "typescript": "^4.0.2" }, diff --git a/node_modules/escape-string-regexp/index.d.ts b/node_modules/escape-string-regexp/index.d.ts new file mode 100644 index 00000000..7d34edc7 --- /dev/null +++ b/node_modules/escape-string-regexp/index.d.ts @@ -0,0 +1,18 @@ +/** +Escape RegExp special characters. + +You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class. + +@example +``` +import escapeStringRegexp = require('escape-string-regexp'); + +const escapedString = escapeStringRegexp('How much $ for a 🦄?'); +//=> 'How much \\$ for a 🦄\\?' + +new RegExp(escapedString); +``` +*/ +declare const escapeStringRegexp: (string: string) => string; + +export = escapeStringRegexp; diff --git a/node_modules/escape-string-regexp/index.js b/node_modules/escape-string-regexp/index.js new file mode 100644 index 00000000..387c5615 --- /dev/null +++ b/node_modules/escape-string-regexp/index.js @@ -0,0 +1,13 @@ +'use strict'; + +module.exports = string => { + if (typeof string !== 'string') { + throw new TypeError('Expected a string'); + } + + // Escape characters with special meaning either inside or outside character sets. 
+ // Use a simple backslash escape when it’s always valid, and a \unnnn escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar. + return string + .replace(/[|\\{}()[\]^$+*?.]/g, '\\$&') + .replace(/-/g, '\\x2d'); +}; diff --git a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE b/node_modules/escape-string-regexp/license similarity index 80% rename from node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE rename to node_modules/escape-string-regexp/license index 57bee5f1..fa7ceba3 100644 --- a/node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE +++ b/node_modules/escape-string-regexp/license @@ -1,7 +1,9 @@ -MIT License Copyright (c) 2019 Octokit contributors +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
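For reference, a minimal usage sketch of the escape-string-regexp implementation added in the hunk above; note that `-` is rewritten to the hex escape `\x2d` because, per the comment in the source, the simpler backslash form would be rejected by the stricter grammar of Unicode-mode patterns:

```js
const escapeStringRegexp = require('escape-string-regexp');

// Characters with special regex meaning are backslash-escaped.
escapeStringRegexp('How much $ for a 🦄?');
//=> 'How much \\$ for a 🦄\\?'

// `-` becomes `\x2d`, which stays valid in `u`-mode patterns.
new RegExp(escapeStringRegexp('foo-bar.baz'));
//=> /foo\x2dbar\.baz/
```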
diff --git a/node_modules/escape-string-regexp/package.json b/node_modules/escape-string-regexp/package.json new file mode 100644 index 00000000..c6eb4a91 --- /dev/null +++ b/node_modules/escape-string-regexp/package.json @@ -0,0 +1,38 @@ +{ + "name": "escape-string-regexp", + "version": "4.0.0", + "description": "Escape RegExp special characters", + "license": "MIT", + "repository": "sindresorhus/escape-string-regexp", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "escape", + "regex", + "regexp", + "regular", + "expression", + "string", + "special", + "characters" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.11.0", + "xo": "^0.28.3" + } +} diff --git a/node_modules/escape-string-regexp/readme.md b/node_modules/escape-string-regexp/readme.md new file mode 100644 index 00000000..2945dfcb --- /dev/null +++ b/node_modules/escape-string-regexp/readme.md @@ -0,0 +1,34 @@ +# escape-string-regexp [![Build Status](https://travis-ci.org/sindresorhus/escape-string-regexp.svg?branch=master)](https://travis-ci.org/sindresorhus/escape-string-regexp) + +> Escape RegExp special characters + +## Install + +``` +$ npm install escape-string-regexp +``` + +## Usage + +```js +const escapeStringRegexp = require('escape-string-regexp'); + +const escapedString = escapeStringRegexp('How much $ for a 🦄?'); +//=> 'How much \\$ for a 🦄\\?' + +new RegExp(escapedString); +``` + +You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class. + +--- + +
+Get professional support for this package with a Tidelift subscription
+
+Tidelift helps make open source sustainable for maintainers while giving companies
+assurances about security, maintenance, and licensing for their dependencies.
+
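To make the readme's note about character classes concrete, a minimal sketch assuming the same CommonJS entry point: because `-` is emitted as `\x2d`, the escaped string cannot be misread as a range when placed inside `[...]`.

```js
const escapeStringRegexp = require('escape-string-regexp');

// Build a character class from arbitrary input; `\x2d` is a class atom,
// not a range separator, so 'a-z$' matches only the literal characters.
const re = new RegExp(`[${escapeStringRegexp('a-z$')}]`, 'g');

'a-z$ costs 5$'.match(re);
//=> ['a', '-', 'z', '$', '$']
```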
diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE deleted file mode 100644 index dea3013d..00000000 --- a/node_modules/inherits/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md deleted file mode 100644 index b1c56658..00000000 --- a/node_modules/inherits/README.md +++ /dev/null @@ -1,42 +0,0 @@ -Browser-friendly inheritance fully compatible with standard node.js -[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). - -This package exports standard `inherits` from node.js `util` module in -node environment, but also provides alternative browser-friendly -implementation through [browser -field](https://gist.github.com/shtylman/4339901). Alternative -implementation is a literal copy of standard one located in standalone -module to avoid requiring of `util`. It also has a shim for old -browsers with no `Object.create` support. - -While keeping you sure you are using standard `inherits` -implementation in node.js environment, it allows bundlers such as -[browserify](https://github.com/substack/node-browserify) to not -include full `util` package to your client code if all you need is -just `inherits` function. It worth, because browser shim for `util` -package is large and `inherits` is often the single function you need -from it. - -It's recommended to use this package instead of -`require('util').inherits` for any code that has chances to be used -not only in node.js but in browser too. - -## usage - -```js -var inherits = require('inherits'); -// then use exactly as the standard one -``` - -## note on version ~1.0 - -Version ~1.0 had completely different motivation and is not compatible -neither with 2.0 nor with standard node.js `inherits`. 
- -If you are using version ~1.0 and planning to switch to ~2.0, be -careful: - -* new version uses `super_` instead of `super` for referencing - superclass -* new version overwrites current prototype while old one preserves any - existing fields on it diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js deleted file mode 100644 index f71f2d93..00000000 --- a/node_modules/inherits/inherits.js +++ /dev/null @@ -1,9 +0,0 @@ -try { - var util = require('util'); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = require('./inherits_browser.js'); -} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js deleted file mode 100644 index 86bbb3dc..00000000 --- a/node_modules/inherits/inherits_browser.js +++ /dev/null @@ -1,27 +0,0 @@ -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) - } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } - } -} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json deleted file mode 100644 index 37b4366b..00000000 --- a/node_modules/inherits/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "inherits", - "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", - "version": "2.0.4", - "keywords": [ - "inheritance", - "class", - "klass", - "oop", - "object-oriented", - "inherits", - "browser", - "browserify" - ], - "main": "./inherits.js", - "browser": "./inherits_browser.js", - "repository": "git://github.com/isaacs/inherits", - "license": "ISC", - "scripts": { - "test": "tap" - }, - "devDependencies": { - "tap": "^14.2.4" - }, - "files": [ - "inherits.js", - "inherits_browser.js" - ] -} diff --git a/node_modules/is-buffer/README.md b/node_modules/is-buffer/README.md index cce0a8cf..8c9785f2 100644 --- a/node_modules/is-buffer/README.md +++ b/node_modules/is-buffer/README.md @@ -34,6 +34,7 @@ npm install is-buffer var isBuffer = require('is-buffer') isBuffer(new Buffer(4)) // true +isBuffer(Buffer.alloc(4)) //true isBuffer(undefined) // false isBuffer(null) // false diff --git a/node_modules/is-buffer/index.d.ts b/node_modules/is-buffer/index.d.ts new file mode 100644 index 00000000..7065c69f --- /dev/null +++ b/node_modules/is-buffer/index.d.ts @@ -0,0 +1,2 @@ +declare function isBuffer(obj: any): boolean +export = isBuffer diff --git a/node_modules/is-buffer/index.js b/node_modules/is-buffer/index.js index 9cce3965..da9bfdd7 100644 --- a/node_modules/is-buffer/index.js +++ b/node_modules/is-buffer/index.js @@ -5,17 +5,7 @@ * @license MIT */ -// The _isBuffer check is for Safari 5-7 support, because it's missing -// Object.prototype.constructor. 
Remove this eventually -module.exports = function (obj) { - return obj != null && (isBuffer(obj) || isSlowBuffer(obj) || !!obj._isBuffer) -} - -function isBuffer (obj) { - return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) -} - -// For Node v0.10 support. Remove this eventually. -function isSlowBuffer (obj) { - return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isBuffer(obj.slice(0, 0)) +module.exports = function isBuffer (obj) { + return obj != null && obj.constructor != null && + typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) } diff --git a/node_modules/is-buffer/package.json b/node_modules/is-buffer/package.json index ea12137a..7cd70d42 100644 --- a/node_modules/is-buffer/package.json +++ b/node_modules/is-buffer/package.json @@ -1,37 +1,40 @@ { "name": "is-buffer", "description": "Determine if an object is a Buffer", - "version": "1.1.6", + "version": "2.0.5", "author": { "name": "Feross Aboukhadijeh", "email": "feross@feross.org", - "url": "http://feross.org/" + "url": "https://feross.org" }, "bugs": { "url": "https://github.com/feross/is-buffer/issues" }, "dependencies": {}, "devDependencies": { + "airtap": "^3.0.0", "standard": "*", - "tape": "^4.0.0", - "zuul": "^3.0.0" + "tape": "^5.0.1" + }, + "engines": { + "node": ">=4" }, "keywords": [ + "arraybuffer", + "browser", + "browser buffer", + "browserify", "buffer", "buffers", - "type", "core buffer", - "browser buffer", - "browserify", - "typed array", - "uint32array", - "int16array", - "int32array", + "dataview", "float32array", "float64array", - "browser", - "arraybuffer", - "dataview" + "int16array", + "int32array", + "type", + "typed array", + "uint32array" ], "license": "MIT", "main": "index.js", @@ -41,11 +44,22 @@ }, "scripts": { "test": "standard && npm run test-node && npm run test-browser", - "test-browser": "zuul -- test/*.js", - "test-browser-local": "zuul --local -- test/*.js", + "test-browser": "airtap -- test/*.js", + "test-browser-local": "airtap --local -- test/*.js", "test-node": "tape test/*.js" }, - "testling": { - "files": "test/*.js" - } + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] } diff --git a/node_modules/is-buffer/test/basic.js b/node_modules/is-buffer/test/basic.js deleted file mode 100644 index be4f8e43..00000000 --- a/node_modules/is-buffer/test/basic.js +++ /dev/null @@ -1,24 +0,0 @@ -var isBuffer = require('../') -var test = require('tape') - -test('is-buffer', function (t) { - t.equal(isBuffer(Buffer.alloc(4)), true, 'new Buffer(4)') - t.equal(isBuffer(Buffer.allocUnsafeSlow(100)), true, 'SlowBuffer(100)') - - t.equal(isBuffer(undefined), false, 'undefined') - t.equal(isBuffer(null), false, 'null') - t.equal(isBuffer(''), false, 'empty string') - t.equal(isBuffer(true), false, 'true') - t.equal(isBuffer(false), false, 'false') - t.equal(isBuffer(0), false, '0') - t.equal(isBuffer(1), false, '1') - t.equal(isBuffer(1.0), false, '1.0') - t.equal(isBuffer('string'), false, 'string') - t.equal(isBuffer({}), false, '{}') - t.equal(isBuffer([]), false, '[]') - t.equal(isBuffer(function foo () {}), false, 'function foo () {}') - t.equal(isBuffer({ isBuffer: null }), false, '{ isBuffer: null }') - t.equal(isBuffer({ isBuffer: function () { throw new Error() } }), false, '{ isBuffer: function () { 
throw new Error() } }') - - t.end() -}) diff --git a/node_modules/to-regex-range/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE similarity index 100% rename from node_modules/to-regex-range/node_modules/is-number/LICENSE rename to node_modules/is-number/LICENSE diff --git a/node_modules/to-regex-range/node_modules/is-number/README.md b/node_modules/is-number/README.md similarity index 100% rename from node_modules/to-regex-range/node_modules/is-number/README.md rename to node_modules/is-number/README.md diff --git a/node_modules/to-regex-range/node_modules/is-number/index.js b/node_modules/is-number/index.js similarity index 100% rename from node_modules/to-regex-range/node_modules/is-number/index.js rename to node_modules/is-number/index.js diff --git a/node_modules/to-regex-range/node_modules/is-number/package.json b/node_modules/is-number/package.json similarity index 100% rename from node_modules/to-regex-range/node_modules/is-number/package.json rename to node_modules/is-number/package.json diff --git a/node_modules/is-plain-obj/index.d.ts b/node_modules/is-plain-obj/index.d.ts new file mode 100644 index 00000000..ac2614d7 --- /dev/null +++ b/node_modules/is-plain-obj/index.d.ts @@ -0,0 +1,29 @@ +/** +Check if a value is a plain object. + +An object is plain if it's created by either `{}`, `new Object()`, or `Object.create(null)`. + +@example +``` +import isPlainObject = require('is-plain-obj'); + +isPlainObject({foo: 'bar'}); +//=> true + +isPlainObject(new Object()); +//=> true + +isPlainObject(Object.create(null)); +//=> true + +isPlainObject([1, 2, 3]); +//=> false + +class Unicorn {} +isPlainObject(new Unicorn()); +//=> false +``` +*/ +declare function isPlainObj(value: unknown): value is object; + +export = isPlainObj; diff --git a/node_modules/is-plain-obj/index.js b/node_modules/is-plain-obj/index.js index 0d1ba9ee..95079ec6 100644 --- a/node_modules/is-plain-obj/index.js +++ b/node_modules/is-plain-obj/index.js @@ -1,7 +1,10 @@ 'use strict'; -var toString = Object.prototype.toString; -module.exports = function (x) { - var prototype; - return toString.call(x) === '[object Object]' && (prototype = Object.getPrototypeOf(x), prototype === null || prototype === Object.getPrototypeOf({})); +module.exports = value => { + if (Object.prototype.toString.call(value) !== '[object Object]') { + return false; + } + + const prototype = Object.getPrototypeOf(value); + return prototype === null || prototype === Object.prototype; }; diff --git a/node_modules/is-plain-obj/license b/node_modules/is-plain-obj/license index 654d0bfe..e7af2f77 100644 --- a/node_modules/is-plain-obj/license +++ b/node_modules/is-plain-obj/license @@ -1,21 +1,9 @@ -The MIT License (MIT) +MIT License Copyright (c) Sindre Sorhus (sindresorhus.com) -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, 
and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-plain-obj/package.json b/node_modules/is-plain-obj/package.json index d331f6e8..87512f17 100644 --- a/node_modules/is-plain-obj/package.json +++ b/node_modules/is-plain-obj/package.json @@ -1,36 +1,38 @@ { - "name": "is-plain-obj", - "version": "1.1.0", - "description": "Check if a value is a plain object", - "license": "MIT", - "repository": "sindresorhus/is-plain-obj", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "node test.js" - }, - "files": [ - "index.js" - ], - "keywords": [ - "obj", - "object", - "is", - "check", - "test", - "type", - "plain", - "vanilla", - "pure", - "simple" - ], - "devDependencies": { - "ava": "0.0.4" - } + "name": "is-plain-obj", + "version": "2.1.0", + "description": "Check if a value is a plain object", + "license": "MIT", + "repository": "sindresorhus/is-plain-obj", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "object", + "is", + "check", + "test", + "type", + "plain", + "vanilla", + "pure", + "simple" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } } diff --git a/node_modules/is-plain-obj/readme.md b/node_modules/is-plain-obj/readme.md index 269e56ae..13571a87 100644 --- a/node_modules/is-plain-obj/readme.md +++ b/node_modules/is-plain-obj/readme.md @@ -2,25 +2,35 @@ > Check if a value is a plain object -An object is plain if it's created by either `{}`, `new Object()` or `Object.create(null)`. +An object is plain if it's created by either `{}`, `new Object()`, or `Object.create(null)`. 
## Install ``` -$ npm install --save is-plain-obj +$ npm install is-plain-obj ``` ## Usage ```js -var isPlainObj = require('is-plain-obj'); +const isPlainObject = require('is-plain-obj'); -isPlainObj({foo: 'bar'}); +isPlainObject({foo: 'bar'}); //=> true -isPlainObj([1, 2, 3]); +isPlainObject(new Object()); +//=> true + +isPlainObject(Object.create(null)); +//=> true + +isPlainObject([1, 2, 3]); +//=> false + +class Unicorn {} +isPlainObject(new Unicorn()); //=> false ``` @@ -28,8 +38,17 @@ isPlainObj([1, 2, 3]); ## Related - [is-obj](https://github.com/sindresorhus/is-obj) - Check if a value is an object +- [is](https://github.com/sindresorhus/is) - Type check values -## License +--- -MIT © [Sindre Sorhus](http://sindresorhus.com) +
+ + Get professional support for this package with a Tidelift subscription + +
+ + Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. +
+
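Aside for reviewers (illustrative only, not part of the generated patch): a common use of the check documented above is validating an options argument before merging it; a minimal sketch with hypothetical names:

```js
// Illustrative sketch only; `withDefaults` and its options are hypothetical.
const isPlainObject = require('is-plain-obj');

const defaults = {retries: 3, verbose: false};

function withDefaults(options) {
	// Accept only plain objects; arrays, class instances, Maps, etc. are rejected.
	if (options !== undefined && !isPlainObject(options)) {
		throw new TypeError('Expected `options` to be a plain object');
	}

	return {...defaults, ...options};
}

withDefaults({retries: 5}); //=> {retries: 5, verbose: false}
// withDefaults([1, 2, 3]) would throw a TypeError.
```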
diff --git a/node_modules/@octokit/endpoint/node_modules/is-plain-object/LICENSE b/node_modules/is-plain-object/LICENSE similarity index 100% rename from node_modules/@octokit/endpoint/node_modules/is-plain-object/LICENSE rename to node_modules/is-plain-object/LICENSE diff --git a/node_modules/@octokit/endpoint/node_modules/is-plain-object/README.md b/node_modules/is-plain-object/README.md similarity index 100% rename from node_modules/@octokit/endpoint/node_modules/is-plain-object/README.md rename to node_modules/is-plain-object/README.md diff --git a/node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.js b/node_modules/is-plain-object/dist/is-plain-object.js similarity index 100% rename from node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.js rename to node_modules/is-plain-object/dist/is-plain-object.js diff --git a/node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.mjs b/node_modules/is-plain-object/dist/is-plain-object.mjs similarity index 100% rename from node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.mjs rename to node_modules/is-plain-object/dist/is-plain-object.mjs diff --git a/node_modules/@octokit/endpoint/node_modules/is-plain-object/is-plain-object.d.ts b/node_modules/is-plain-object/is-plain-object.d.ts similarity index 100% rename from node_modules/@octokit/endpoint/node_modules/is-plain-object/is-plain-object.d.ts rename to node_modules/is-plain-object/is-plain-object.d.ts diff --git a/node_modules/@octokit/endpoint/node_modules/is-plain-object/package.json b/node_modules/is-plain-object/package.json similarity index 100% rename from node_modules/@octokit/endpoint/node_modules/is-plain-object/package.json rename to node_modules/is-plain-object/package.json diff --git a/node_modules/is-whitespace-character/index.js b/node_modules/is-whitespace-character/index.js deleted file mode 100644 index 801c19f0..00000000 --- a/node_modules/is-whitespace-character/index.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -module.exports = whitespace - -var fromCode = String.fromCharCode -var re = /\s/ - -// Check if the given character code, or the character code at the first -// character, is a whitespace character. -function whitespace(character) { - return re.test( - typeof character === 'number' ? fromCode(character) : character.charAt(0) - ) -} diff --git a/node_modules/is-whitespace-character/package.json b/node_modules/is-whitespace-character/package.json deleted file mode 100644 index d6b35d9a..00000000 --- a/node_modules/is-whitespace-character/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "is-whitespace-character", - "version": "1.0.4", - "description": "Check if a character is a whitespace character", - "license": "MIT", - "keywords": [ - "string", - "character", - "char", - "code", - "whitespace", - "white", - "space" - ], - "repository": "wooorm/is-whitespace-character", - "bugs": "https://github.com/wooorm/is-whitespace-character/issues", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - }, - "author": "Titus Wormer (https://wooorm.com)", - "contributors": [ - "Titus Wormer (https://wooorm.com)" - ], - "files": [ - "index.js" - ], - "dependencies": {}, - "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^15.0.0", - "prettier": "^1.0.0", - "remark-cli": "^7.0.0", - "remark-preset-wooorm": "^6.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "xo": "^0.25.0" - }, - "scripts": { - "format": "remark . 
-qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s isWhitespaceCharacter -o is-whitespace-character.js", - "build-mangle": "browserify . -s isWhitespaceCharacter -p tinyify -o is-whitespace-character.min.js", - "build": "npm run build-bundle && npm run build-mangle", - "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" - }, - "prettier": { - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "bracketSpacing": false, - "semi": false, - "trailingComma": "none" - }, - "xo": { - "prettier": true, - "esnext": false, - "ignores": [ - "is-whitespace-character.js" - ] - }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" - ] - } -} diff --git a/node_modules/is-whitespace-character/readme.md b/node_modules/is-whitespace-character/readme.md deleted file mode 100644 index 34d4f343..00000000 --- a/node_modules/is-whitespace-character/readme.md +++ /dev/null @@ -1,74 +0,0 @@ -# is-whitespace-character - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -Check if a character is a whitespace character: `\s`, which equals all Unicode -Space Separators (including `[ \t\v\f]`), the BOM (`\uFEFF`), and line -terminator (`[\n\r\u2028\u2029]`). - -## Install - -[npm][]: - -```sh -npm install is-whitespace-character -``` - -## Use - -```js -var whitespace = require('is-whitespace-character') - -whitespace(' ') // => true -whitespace('\n') // => true -whitespace('\uFEFF') // => true -whitespace('_') // => false -whitespace('a') // => false -whitespace('💩') // => false -``` - -## API - -### `whitespaceCharacter(character|code)` - -Check whether the given character code (`number`), or the character code at the -first position (`string`), is a whitespace character. 
- -## Related - -* [`is-alphabetical`](https://github.com/wooorm/is-alphabetical) -* [`is-alphanumerical`](https://github.com/wooorm/is-alphanumerical) -* [`is-decimal`](https://github.com/wooorm/is-decimal) -* [`is-hexadecimal`](https://github.com/wooorm/is-hexadecimal) -* [`is-word-character`](https://github.com/wooorm/is-word-character) - -## License - -[MIT][license] © [Titus Wormer][author] - - - -[build-badge]: https://img.shields.io/travis/wooorm/is-whitespace-character.svg - -[build]: https://travis-ci.org/wooorm/is-whitespace-character - -[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/is-whitespace-character.svg - -[coverage]: https://codecov.io/github/wooorm/is-whitespace-character - -[downloads-badge]: https://img.shields.io/npm/dm/is-whitespace-character.svg - -[downloads]: https://www.npmjs.com/package/is-whitespace-character - -[size-badge]: https://img.shields.io/bundlephobia/minzip/is-whitespace-character.svg - -[size]: https://bundlephobia.com/result?p=is-whitespace-character - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com diff --git a/node_modules/is-word-character/index.js b/node_modules/is-word-character/index.js deleted file mode 100644 index 8c3537f9..00000000 --- a/node_modules/is-word-character/index.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -module.exports = wordCharacter - -var fromCode = String.fromCharCode -var re = /\w/ - -// Check if the given character code, or the character code at the first -// character, is a word character. -function wordCharacter(character) { - return re.test( - typeof character === 'number' ? fromCode(character) : character.charAt(0) - ) -} diff --git a/node_modules/is-word-character/readme.md b/node_modules/is-word-character/readme.md deleted file mode 100644 index 3c88ce97..00000000 --- a/node_modules/is-word-character/readme.md +++ /dev/null @@ -1,72 +0,0 @@ -# is-word-character - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -Check if a character is a word character (`\w`, which equals `[a-zA-Z0-9_]`). - -## Install - -[npm][]: - -```sh -npm install is-word-character -``` - -## Use - -```js -var wordCharacter = require('is-word-character') - -wordCharacter('a') // => true -wordCharacter('Z') // => true -wordCharacter('0') // => true -wordCharacter('_') // => true -wordCharacter(' ') // => false -wordCharacter('💩') // => false -``` - -## API - -### `wordCharacter(character|code)` - -Check whether the given character code (`number`), or the character code at the -first position (`string`), is a word character. 
- -## Related - -* [`is-alphabetical`](https://github.com/wooorm/is-alphabetical) -* [`is-alphanumerical`](https://github.com/wooorm/is-alphanumerical) -* [`is-decimal`](https://github.com/wooorm/is-decimal) -* [`is-hexadecimal`](https://github.com/wooorm/is-hexadecimal) -* [`is-whitespace-character`](https://github.com/wooorm/is-whitespace-character) - -## License - -[MIT][license] © [Titus Wormer][author] - - - -[build-badge]: https://img.shields.io/travis/wooorm/is-word-character.svg - -[build]: https://travis-ci.org/wooorm/is-word-character - -[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/is-word-character.svg - -[coverage]: https://codecov.io/github/wooorm/is-word-character - -[downloads-badge]: https://img.shields.io/npm/dm/is-word-character.svg - -[downloads]: https://www.npmjs.com/package/is-word-character - -[size-badge]: https://img.shields.io/bundlephobia/minzip/is-word-character.svg - -[size]: https://bundlephobia.com/result?p=is-word-character - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com diff --git a/node_modules/longest-streak/index.js b/node_modules/longest-streak/index.js new file mode 100644 index 00000000..ec579877 --- /dev/null +++ b/node_modules/longest-streak/index.js @@ -0,0 +1,36 @@ +'use strict' + +module.exports = longestStreak + +// Get the count of the longest repeating streak of `character` in `value`. +function longestStreak(value, character) { + var count = 0 + var maximum = 0 + var expected + var index + + if (typeof character !== 'string' || character.length !== 1) { + throw new Error('Expected character') + } + + value = String(value) + index = value.indexOf(character) + expected = index + + while (index !== -1) { + count++ + + if (index === expected) { + if (count > maximum) { + maximum = count + } + } else { + count = 1 + } + + expected = index + 1 + index = value.indexOf(character, expected) + } + + return maximum +} diff --git a/node_modules/trim-trailing-lines/license b/node_modules/longest-streak/license similarity index 100% rename from node_modules/trim-trailing-lines/license rename to node_modules/longest-streak/license diff --git a/node_modules/is-word-character/package.json b/node_modules/longest-streak/package.json similarity index 72% rename from node_modules/is-word-character/package.json rename to node_modules/longest-streak/package.json index 42c262cf..0a7757a1 100644 --- a/node_modules/is-word-character/package.json +++ b/node_modules/longest-streak/package.json @@ -1,17 +1,18 @@ { - "name": "is-word-character", - "version": "1.0.4", - "description": "Check if a character is a word character", + "name": "longest-streak", + "version": "2.0.4", + "description": "Count the longest repeating streak of a character", "license": "MIT", "keywords": [ - "string", - "character", - "char", - "code", - "word" + "count", + "length", + "longest", + "repeating", + "streak", + "character" ], - "repository": "wooorm/is-word-character", - "bugs": "https://github.com/wooorm/is-word-character/issues", + "repository": "wooorm/longest-streak", + "bugs": "https://github.com/wooorm/longest-streak/issues", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -36,13 +37,19 @@ }, "scripts": { "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s isWordCharacter -o is-word-character.js", - "build-mangle": "browserify . -s isWordCharacter -p tinyify -o is-word-character.min.js", + "build-bundle": "browserify . 
-s longestStreak -o longest-streak.js", + "build-mangle": "browserify . -s longestStreak -p tinyify -o longest-streak.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", "test": "npm run format && npm run build && npm run test-coverage" }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, "prettier": { "tabWidth": 2, "useTabs": false, @@ -55,15 +62,9 @@ "prettier": true, "esnext": false, "ignores": [ - "is-word-character.js" + "longest-streak.js" ] }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 - }, "remarkConfig": { "plugins": [ "preset-wooorm" diff --git a/node_modules/longest-streak/readme.md b/node_modules/longest-streak/readme.md new file mode 100644 index 00000000..80dfa266 --- /dev/null +++ b/node_modules/longest-streak/readme.md @@ -0,0 +1,72 @@ +# longest-streak + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] + +Count the longest repeating streak of a character. + +## Install + +[npm][]: + +```sh +npm install longest-streak +``` + +## Use + +```js +var longestStreak = require('longest-streak') + +longestStreak('` foo `` bar `', '`') // => 2 +``` + +## API + +### `longestStreak(value, character)` + +Get the count of the longest repeating streak of `character` in `value`. + +###### Parameters + +* `value` (`string`) — Content, coerced to string. +* `character` (`string`) — Single character to look for. + +###### Returns + +`number` — Number of characters at the place where `character` occurs in +its longest streak in `value`. + +###### Throws + +* `Error` — when `character` is not a single character string. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://img.shields.io/travis/wooorm/longest-streak.svg + +[build]: https://travis-ci.org/wooorm/longest-streak + +[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/longest-streak.svg + +[coverage]: https://codecov.io/github/wooorm/longest-streak + +[downloads-badge]: https://img.shields.io/npm/dm/longest-streak.svg + +[downloads]: https://www.npmjs.com/package/longest-streak + +[size-badge]: https://img.shields.io/bundlephobia/minzip/longest-streak.svg + +[size]: https://bundlephobia.com/result?p=longest-streak + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com diff --git a/node_modules/markdown-escapes/index.js b/node_modules/markdown-escapes/index.js deleted file mode 100644 index f8bea48e..00000000 --- a/node_modules/markdown-escapes/index.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict' - -module.exports = escapes - -var defaults = [ - '\\', - '`', - '*', - '{', - '}', - '[', - ']', - '(', - ')', - '#', - '+', - '-', - '.', - '!', - '_', - '>' -] - -var gfm = defaults.concat(['~', '|']) - -var commonmark = gfm.concat([ - '\n', - '"', - '$', - '%', - '&', - "'", - ',', - '/', - ':', - ';', - '<', - '=', - '?', - '@', - '^' -]) - -escapes.default = defaults -escapes.gfm = gfm -escapes.commonmark = commonmark - -// Get markdown escapes. -function escapes(options) { - var settings = options || {} - - if (settings.commonmark) { - return commonmark - } - - return settings.gfm ? 
gfm : defaults -} diff --git a/node_modules/markdown-escapes/package.json b/node_modules/markdown-escapes/package.json deleted file mode 100644 index 7f94d86a..00000000 --- a/node_modules/markdown-escapes/package.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "name": "markdown-escapes", - "version": "1.0.4", - "description": "List of escapable characters in markdown", - "license": "MIT", - "keywords": [ - "markdown", - "escape", - "pedantic", - "gfm", - "commonmark" - ], - "repository": "wooorm/markdown-escapes", - "bugs": "https://github.com/wooorm/markdown-escapes/issues", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - }, - "author": "Titus Wormer (https://wooorm.com)", - "contributors": [ - "Titus Wormer (https://wooorm.com)" - ], - "files": [ - "index.js" - ], - "dependencies": {}, - "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^14.0.0", - "prettier": "^1.0.0", - "remark-cli": "^7.0.0", - "remark-preset-wooorm": "^6.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "xo": "^0.25.0" - }, - "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s markdownEscapes -o markdown-escapes.js", - "build-mangle": "browserify . -s markdownEscapes -p tinyify -o markdown-escapes.min.js", - "build": "npm run build-bundle && npm run build-mangle", - "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" - ] - }, - "prettier": { - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "bracketSpacing": false, - "semi": false, - "trailingComma": "none" - }, - "xo": { - "prettier": true, - "esnext": false, - "ignores": [ - "markdown-escapes.js" - ] - }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 - } -} diff --git a/node_modules/markdown-escapes/readme.md b/node_modules/markdown-escapes/readme.md deleted file mode 100644 index a7404526..00000000 --- a/node_modules/markdown-escapes/readme.md +++ /dev/null @@ -1,80 +0,0 @@ -# markdown-escapes - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -List of escapable characters in markdown. - -## Install - -[npm][]: - -```sh -npm install markdown-escapes -``` - -## Use - -```js -var escapes = require('markdown-escapes'); - -// Access by property: -escapes.commonmark; //=> ['\\', '`', ..., '@', '^'] - -// Access by options object: -escapes({gfm: true}); //=> ['\\', '`', ..., '~', '|'] -``` - -## API - -### `escapes([options])` - -Get escapes. -Supports `options.commonmark` and `options.gfm`, which when `true` returns the -extra escape characters supported by those flavors. - -###### Returns - -`Array.`. - -### `escapes.default` - -List of default escapable characters. - -### `escapes.gfm` - -List of escapable characters in GFM (which includes all `default`s). - -### `escapes.commonmark` - -List of escapable characters in CommonMark (which includes all `gfm`s). 
- -## License - -[MIT][license] © [Titus Wormer][author] - - - -[build-badge]: https://img.shields.io/travis/wooorm/markdown-escapes.svg - -[build]: https://travis-ci.org/wooorm/markdown-escapes - -[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/markdown-escapes.svg - -[coverage]: https://codecov.io/github/wooorm/markdown-escapes - -[downloads-badge]: https://img.shields.io/npm/dm/markdown-escapes.svg - -[downloads]: https://www.npmjs.com/package/markdown-escapes - -[size-badge]: https://img.shields.io/bundlephobia/minzip/markdown-escapes.svg - -[size]: https://bundlephobia.com/result?p=markdown-escapes - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com diff --git a/node_modules/markdown-table/index.js b/node_modules/markdown-table/index.js new file mode 100644 index 00000000..ff2a0ce5 --- /dev/null +++ b/node_modules/markdown-table/index.js @@ -0,0 +1,249 @@ +'use strict' + +var repeat = require('repeat-string') + +module.exports = markdownTable + +var trailingWhitespace = / +$/ + +// Characters. +var space = ' ' +var lineFeed = '\n' +var dash = '-' +var colon = ':' +var verticalBar = '|' + +var x = 0 +var C = 67 +var L = 76 +var R = 82 +var c = 99 +var l = 108 +var r = 114 + +// Create a table from a matrix of strings. +function markdownTable(table, options) { + var settings = options || {} + var padding = settings.padding !== false + var start = settings.delimiterStart !== false + var end = settings.delimiterEnd !== false + var align = (settings.align || []).concat() + var alignDelimiters = settings.alignDelimiters !== false + var alignments = [] + var stringLength = settings.stringLength || defaultStringLength + var rowIndex = -1 + var rowLength = table.length + var cellMatrix = [] + var sizeMatrix = [] + var row = [] + var sizes = [] + var longestCellByColumn = [] + var mostCellsPerRow = 0 + var cells + var columnIndex + var columnLength + var largest + var size + var cell + var lines + var line + var before + var after + var code + + // This is a superfluous loop if we don’t align delimiters, but otherwise we’d + // do superfluous work when aligning, so optimize for aligning. + while (++rowIndex < rowLength) { + cells = table[rowIndex] + columnIndex = -1 + columnLength = cells.length + row = [] + sizes = [] + + if (columnLength > mostCellsPerRow) { + mostCellsPerRow = columnLength + } + + while (++columnIndex < columnLength) { + cell = serialize(cells[columnIndex]) + + if (alignDelimiters === true) { + size = stringLength(cell) + sizes[columnIndex] = size + + largest = longestCellByColumn[columnIndex] + + if (largest === undefined || size > largest) { + longestCellByColumn[columnIndex] = size + } + } + + row.push(cell) + } + + cellMatrix[rowIndex] = row + sizeMatrix[rowIndex] = sizes + } + + // Figure out which alignments to use. + columnIndex = -1 + columnLength = mostCellsPerRow + + if (typeof align === 'object' && 'length' in align) { + while (++columnIndex < columnLength) { + alignments[columnIndex] = toAlignment(align[columnIndex]) + } + } else { + code = toAlignment(align) + + while (++columnIndex < columnLength) { + alignments[columnIndex] = code + } + } + + // Inject the alignment row. 
+ columnIndex = -1 + columnLength = mostCellsPerRow + row = [] + sizes = [] + + while (++columnIndex < columnLength) { + code = alignments[columnIndex] + before = '' + after = '' + + if (code === l) { + before = colon + } else if (code === r) { + after = colon + } else if (code === c) { + before = colon + after = colon + } + + // There *must* be at least one hyphen-minus in each alignment cell. + size = alignDelimiters + ? Math.max( + 1, + longestCellByColumn[columnIndex] - before.length - after.length + ) + : 1 + + cell = before + repeat(dash, size) + after + + if (alignDelimiters === true) { + size = before.length + size + after.length + + if (size > longestCellByColumn[columnIndex]) { + longestCellByColumn[columnIndex] = size + } + + sizes[columnIndex] = size + } + + row[columnIndex] = cell + } + + // Inject the alignment row. + cellMatrix.splice(1, 0, row) + sizeMatrix.splice(1, 0, sizes) + + rowIndex = -1 + rowLength = cellMatrix.length + lines = [] + + while (++rowIndex < rowLength) { + row = cellMatrix[rowIndex] + sizes = sizeMatrix[rowIndex] + columnIndex = -1 + columnLength = mostCellsPerRow + line = [] + + while (++columnIndex < columnLength) { + cell = row[columnIndex] || '' + before = '' + after = '' + + if (alignDelimiters === true) { + size = longestCellByColumn[columnIndex] - (sizes[columnIndex] || 0) + code = alignments[columnIndex] + + if (code === r) { + before = repeat(space, size) + } else if (code === c) { + if (size % 2 === 0) { + before = repeat(space, size / 2) + after = before + } else { + before = repeat(space, size / 2 + 0.5) + after = repeat(space, size / 2 - 0.5) + } + } else { + after = repeat(space, size) + } + } + + if (start === true && columnIndex === 0) { + line.push(verticalBar) + } + + if ( + padding === true && + // Don’t add the opening space if we’re not aligning and the cell is + // empty: there will be a closing space. + !(alignDelimiters === false && cell === '') && + (start === true || columnIndex !== 0) + ) { + line.push(space) + } + + if (alignDelimiters === true) { + line.push(before) + } + + line.push(cell) + + if (alignDelimiters === true) { + line.push(after) + } + + if (padding === true) { + line.push(space) + } + + if (end === true || columnIndex !== columnLength - 1) { + line.push(verticalBar) + } + } + + line = line.join('') + + if (end === false) { + line = line.replace(trailingWhitespace, '') + } + + lines.push(line) + } + + return lines.join(lineFeed) +} + +function serialize(value) { + return value === null || value === undefined ? '' : String(value) +} + +function defaultStringLength(value) { + return value.length +} + +function toAlignment(value) { + var code = typeof value === 'string' ? value.charCodeAt(0) : x + + return code === L || code === l + ? l + : code === R || code === r + ? r + : code === C || code === c + ? 
c + : x +} diff --git a/node_modules/state-toggle/license b/node_modules/markdown-table/license similarity index 94% rename from node_modules/state-toggle/license rename to node_modules/markdown-table/license index 8d8660d3..0c06d5bc 100644 --- a/node_modules/state-toggle/license +++ b/node_modules/markdown-table/license @@ -1,6 +1,6 @@ (The MIT License) -Copyright (c) 2016 Titus Wormer +Copyright (c) 2014 Titus Wormer Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/node_modules/collapse-white-space/package.json b/node_modules/markdown-table/package.json similarity index 64% rename from node_modules/collapse-white-space/package.json rename to node_modules/markdown-table/package.json index 6c9e8f34..fb04ec7b 100644 --- a/node_modules/collapse-white-space/package.json +++ b/node_modules/markdown-table/package.json @@ -1,19 +1,22 @@ { - "name": "collapse-white-space", - "version": "1.0.6", - "description": "Replace multiple white-space characters with a single space", + "name": "markdown-table", + "version": "2.0.0", + "description": "Markdown tables", "license": "MIT", "keywords": [ - "collapse", - "white", - "space" + "text", + "markdown", + "table", + "align", + "rows", + "tabular" ], - "repository": "wooorm/collapse-white-space", - "bugs": "https://github.com/wooorm/collapse-white-space/issues", + "repository": "wooorm/markdown-table", + "bugs": "https://github.com/wooorm/markdown-table/issues", "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - }, + "type": "github", + "url": "https://github.com/sponsors/wooorm" + }, "author": "Titus Wormer (https://wooorm.com)", "contributors": [ "Titus Wormer (https://wooorm.com)" @@ -21,26 +24,35 @@ "files": [ "index.js" ], - "dependencies": {}, + "dependencies": { + "repeat-string": "^1.0.0" + }, "devDependencies": { "browserify": "^16.0.0", + "chalk": "^3.0.0", "nyc": "^15.0.0", "prettier": "^1.0.0", "remark-cli": "^7.0.0", "remark-preset-wooorm": "^6.0.0", + "strip-ansi": "^6.0.0", "tape": "^4.0.0", "tinyify": "^2.0.0", "xo": "^0.25.0" }, "scripts": { "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s collapseWhiteSpace -o collapse-white-space.js", - "build-mangle": "browserify . -s collapseWhiteSpace -p tinyify -o collapse-white-space.min.js", + "build-bundle": "browserify . -s markdownTable -o markdown-table.js", + "build-mangle": "browserify . -s markdownTable -p tinyify -o markdown-table.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", "test": "npm run format && npm run build && npm run test-coverage" }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + }, "prettier": { "tabWidth": 2, "useTabs": false, @@ -52,13 +64,11 @@ "xo": { "prettier": true, "esnext": false, + "rules": { + "complexity": "off" + }, "ignores": [ - "collapse-white-space.js" - ] - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" + "markdown-table.js" ] }, "nyc": { diff --git a/node_modules/markdown-table/readme.md b/node_modules/markdown-table/readme.md new file mode 100644 index 00000000..19e823ce --- /dev/null +++ b/node_modules/markdown-table/readme.md @@ -0,0 +1,259 @@ +# markdown-table + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] + +Generate fancy [Markdown][fancy] tables. 
+ +## Install + +[npm][]: + +```sh +npm install markdown-table +``` + +## Use + +Typical usage (defaults to align left): + +```js +var table = require('markdown-table') + +table([ + ['Branch', 'Commit'], + ['master', '0123456789abcdef'], + ['staging', 'fedcba9876543210'] +]) +``` + +Yields: + +```markdown +| Branch | Commit | +| ------- | ---------------- | +| master | 0123456789abcdef | +| staging | fedcba9876543210 | +``` + +With align: + +```js +table( + [ + ['Beep', 'No.', 'Boop'], + ['beep', '1024', 'xyz'], + ['boop', '3388450', 'tuv'], + ['foo', '10106', 'qrstuv'], + ['bar', '45', 'lmno'] + ], + {align: ['l', 'c', 'r']} +) +``` + +Yields: + +```markdown +| Beep | No. | Boop | +| :--- | :-----: | -----: | +| beep | 1024 | xyz | +| boop | 3388450 | tuv | +| foo | 10106 | qrstuv | +| bar | 45 | lmno | +``` + +## API + +### `markdownTable(table[, options])` + +Turns a given matrix of strings (an array of arrays of strings) into a table. + +##### `options` + +###### `options.align` + +One style for all columns, or styles for their respective columns (`string` or +`Array.`). +Each style is either `'l'` (left), `'r'` (right), or `'c'` (center). +Other values are treated as `''`, which doesn’t place the colon in the alignment +row but does align left. +*Only the lowercased first character is used, so `Right` is fine.* + +###### `options.padding` + +Whether to add a space of padding between delimiters and cells (`boolean`, +default: `true`). + +When `true`, there is padding: + +```markdown +| Alpha | B | +| ----- | ----- | +| C | Delta | +``` + +When `false`, there is no padding: + +```markdown +|Alpha|B | +|-----|-----| +|C |Delta| +``` + +###### `options.delimiterStart` + +Whether to begin each row with the delimiter (`boolean`, default: `true`). + +Note: please don’t use this: it could create fragile structures that aren’t +understandable to some Markdown parsers. + +When `true`, there are starting delimiters: + +```markdown +| Alpha | B | +| ----- | ----- | +| C | Delta | +``` + +When `false`, there are no starting delimiters: + +```markdown +Alpha | B | +----- | ----- | +C | Delta | +``` + +###### `options.delimiterEnd` + +Whether to end each row with the delimiter (`boolean`, default: `true`). + +Note: please don’t use this: it could create fragile structures that aren’t +understandable to some Markdown parsers. + +When `true`, there are ending delimiters: + +```markdown +| Alpha | B | +| ----- | ----- | +| C | Delta | +``` + +When `false`, there are no ending delimiters: + +```markdown +| Alpha | B +| ----- | ----- +| C | Delta +``` + +###### `options.alignDelimiters` + +Whether to align the delimiters (`boolean`, default: `true`). +By default, they are aligned: + +```markdown +| Alpha | B | +| ----- | ----- | +| C | Delta | +``` + +Pass `false` to make them staggered: + +```markdown +| Alpha | B | +| - | - | +| C | Delta | +``` + +###### `options.stringLength` + +Method to detect the length of a cell (`Function`, default: `s => s.length`). + +Full-width characters and ANSI-sequences all mess up delimiter alignment +when viewing the Markdown source. +To fix this, you have to pass in a `stringLength` option to detect the “visible” +length of a cell (note that what is and isn’t visible depends on your editor). 
+ +Without such a function, the following: + +```js +table([ + ['Alpha', 'Bravo'], + ['中文', 'Charlie'], + ['👩‍❤️‍👩', 'Delta'] +]) +``` + +Yields: + +```markdown +| Alpha | Bravo | +| - | - | +| 中文 | Charlie | +| 👩‍❤️‍👩 | Delta | +``` + +With [`string-width`][string-width]: + +```js +var width = require('string-width') + +table( + [ + ['Alpha', 'Bravo'], + ['中文', 'Charlie'], + ['👩‍❤️‍👩', 'Delta'] + ], + {stringLength: width} +) +``` + +Yields: + +```markdown +| Alpha | Bravo | +| ----- | ------- | +| 中文 | Charlie | +| 👩‍❤️‍👩 | Delta | +``` + +## Inspiration + +The original idea and basic implementation was inspired by James Halliday’s +[`text-table`][text-table] library. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://img.shields.io/travis/wooorm/markdown-table.svg + +[build]: https://travis-ci.org/wooorm/markdown-table + +[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/markdown-table.svg + +[coverage]: https://codecov.io/github/wooorm/markdown-table + +[downloads-badge]: https://img.shields.io/npm/dm/markdown-table.svg + +[downloads]: https://www.npmjs.com/package/markdown-table + +[size-badge]: https://img.shields.io/bundlephobia/minzip/markdown-table.svg + +[size]: https://bundlephobia.com/result?p=markdown-table + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[fancy]: https://help.github.com/articles/github-flavored-markdown/#tables + +[text-table]: https://github.com/substack/text-table + +[string-width]: https://github.com/sindresorhus/string-width diff --git a/node_modules/mdast-util-find-and-replace/index.js b/node_modules/mdast-util-find-and-replace/index.js new file mode 100644 index 00000000..a6c28e43 --- /dev/null +++ b/node_modules/mdast-util-find-and-replace/index.js @@ -0,0 +1,180 @@ +'use strict' + +module.exports = findAndReplace + +var visit = require('unist-util-visit-parents') +var convert = require('unist-util-is/convert') +var escape = require('escape-string-regexp') + +var splice = [].splice + +function findAndReplace(tree, find, replace, options) { + var settings + var schema + + if (typeof find === 'string' || (find && typeof find.exec === 'function')) { + schema = [[find, replace]] + } else { + schema = find + options = replace + } + + settings = options || {} + + search(tree, settings, handlerFactory(toPairs(schema))) + + return tree + + function handlerFactory(pairs) { + var pair = pairs[0] + + return handler + + function handler(node, parent) { + var find = pair[0] + var replace = pair[1] + var nodes = [] + var start = 0 + var index = parent.children.indexOf(node) + var position + var match + var subhandler + var value + + find.lastIndex = 0 + + match = find.exec(node.value) + + while (match) { + position = match.index + value = replace.apply( + null, + [].concat(match, {index: match.index, input: match.input}) + ) + + if (value !== false) { + if (start !== position) { + nodes.push({type: 'text', value: node.value.slice(start, position)}) + } + + if (typeof value === 'string' && value.length > 0) { + value = {type: 'text', value: value} + } + + if (value) { + nodes = [].concat(nodes, value) + } + + start = position + match[0].length + } + + if (!find.global) { + break + } + + match = find.exec(node.value) + } + + if (position === undefined) { + nodes = [node] + index-- + } else { + if (start < node.value.length) { + nodes.push({type: 'text', value: node.value.slice(start)}) + } + + nodes.unshift(index, 1) + splice.apply(parent.children, nodes) + } + + if 
(pairs.length > 1) { + subhandler = handlerFactory(pairs.slice(1)) + position = -1 + + while (++position < nodes.length) { + node = nodes[position] + + if (node.type === 'text') { + subhandler(node, parent) + } else { + search(node, settings, subhandler) + } + } + } + + return index + nodes.length + 1 + } + } +} + +function search(tree, settings, handler) { + var ignored = convert(settings.ignore || []) + var result = [] + + visit(tree, 'text', visitor) + + return result + + function visitor(node, parents) { + var index = -1 + var parent + var grandparent + + while (++index < parents.length) { + parent = parents[index] + + if ( + ignored( + parent, + grandparent ? grandparent.children.indexOf(parent) : undefined, + grandparent + ) + ) { + return + } + + grandparent = parent + } + + return handler(node, grandparent) + } +} + +function toPairs(schema) { + var result = [] + var key + var index + + if (typeof schema !== 'object') { + throw new Error('Expected array or object as schema') + } + + if ('length' in schema) { + index = -1 + + while (++index < schema.length) { + result.push([ + toExpression(schema[index][0]), + toFunction(schema[index][1]) + ]) + } + } else { + for (key in schema) { + result.push([toExpression(key), toFunction(schema[key])]) + } + } + + return result +} + +function toExpression(find) { + return typeof find === 'string' ? new RegExp(escape(find), 'g') : find +} + +function toFunction(replace) { + return typeof replace === 'function' ? replace : returner + + function returner() { + return replace + } +} diff --git a/node_modules/markdown-escapes/license b/node_modules/mdast-util-find-and-replace/license similarity index 94% rename from node_modules/markdown-escapes/license rename to node_modules/mdast-util-find-and-replace/license index 8d8660d3..39372356 100644 --- a/node_modules/markdown-escapes/license +++ b/node_modules/mdast-util-find-and-replace/license @@ -1,6 +1,6 @@ (The MIT License) -Copyright (c) 2016 Titus Wormer +Copyright (c) 2020 Titus Wormer Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/node_modules/mdast-util-find-and-replace/package.json b/node_modules/mdast-util-find-and-replace/package.json new file mode 100644 index 00000000..4f89eab4 --- /dev/null +++ b/node_modules/mdast-util-find-and-replace/package.json @@ -0,0 +1,76 @@ +{ + "name": "mdast-util-find-and-replace", + "version": "1.1.1", + "description": "mdast utility to find and replace text in a tree", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "find", + "replace" + ], + "repository": "syntax-tree/mdast-util-find-and-replace", + "bugs": "https://github.com/syntax-tree/mdast-util-find-and-replace/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "index.js" + ], + "dependencies": { + "escape-string-regexp": "^4.0.0", + "unist-util-is": "^4.0.0", + "unist-util-visit-parents": "^3.0.0" + }, + "devDependencies": { + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "unist-builder": "^2.0.0", + "xo": "^0.37.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test": "npm run format && npm run test-coverage" + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "unicorn/prefer-type-error": "off", + "guard-for-in": "off" + } + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-find-and-replace/readme.md b/node_modules/mdast-util-find-and-replace/readme.md new file mode 100644 index 00000000..3e92f719 --- /dev/null +++ b/node_modules/mdast-util-find-and-replace/readme.md @@ -0,0 +1,187 @@ +# mdast-util-find-and-replace + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +[**mdast**][mdast] utility to find and replace text in a [*tree*][tree]. + +## Install + +[npm][]: + +```sh +npm install mdast-util-find-and-replace +``` + +## Use + +```js +var u = require('unist-builder') +var inspect = require('unist-util-inspect') +var findAndReplace = require('mdast-util-find-and-replace') + +var tree = u('paragraph', [ + u('text', 'Some '), + u('emphasis', [u('text', 'emphasis')]), + u('text', ' and '), + u('strong', [u('text', 'importance')]), + u('text', '.') +]) + +findAndReplace(tree, 'and', 'or') + +findAndReplace(tree, {emphasis: 'em', importance: 'strong'}) + +findAndReplace(tree, { + Some: function ($0) { + return u('link', {url: '//example.com#' + $0}, [u('text', $0)]) + } +}) + +console.log(inspect(tree)) +``` + +Yields: + +```txt +paragraph[8] +├─ link[1] [url="//example.com#Some"] +│ └─ text: "Some" +├─ text: " " +├─ emphasis[1] +│ └─ text: "em" +├─ text: " " +├─ text: "or" +├─ text: " " +├─ strong[1] +│ └─ text: "strong" +└─ text: "." +``` + +## API + +### `findAndReplace(tree, find[, replace][, options])` + +Find and replace text in [**mdast**][mdast] [*tree*][tree]s. +The algorithm searches the tree in [*preorder*][preorder] for complete values +in [`Text`][text] nodes. +Partial matches are not supported. + +###### Signatures + +* `findAndReplace(tree, find, replace?[, options])` +* `findAndReplace(tree, search[, options])` + +###### Parameters + +* `tree` ([`Node`][node]) + — [**mdast**][mdast] [*tree*][tree] +* `find` (`string` or `RegExp`) + — Value to find and remove. + When `string`, escaped and made into a global `RegExp` +* `replace` (`string` or `Function`) + — Value to insert. + When `string`, turned into a [`Text`][text] node. + When `Function`, invoked with the results of calling `RegExp.exec` as + arguments, in which case it can return a single or a list of [`Node`][node], + a `string` (which is wrapped in a [`Text`][text] node), or `false` to not + replace +* `search` (`Object` or `Array`) + — Perform multiple find-and-replaces. + When `Array`, each entry is a tuple (`Array`) of a `find` (at `0`) and + `replace` (at `1`). + When `Object`, each key is a `find` (in string form) and each value is a + `replace` +* `options.ignore` (`Test`, default: `[]`) + — Any [`unist-util-is`][test] compatible test. + +###### Returns + +The given, modified, `tree`. 
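Aside for reviewers (illustrative only, not part of the generated patch): the readme's example above does not exercise `options.ignore`; a minimal sketch of skipping replacements under certain parents, assuming an array of node type strings as the test (which `unist-util-is` accepts) and a hypothetical tree:

```js
// Illustrative sketch only; the tree and replacement values are hypothetical.
var u = require('unist-builder')
var findAndReplace = require('mdast-util-find-and-replace')

var tree = u('paragraph', [
  u('text', 'Ping admin if it breaks: '),
  u('link', {url: 'https://example.com/admin'}, [u('text', 'admin page')])
])

// Replace plain-text occurrences of `admin`, but leave anything inside `link` nodes alone.
findAndReplace(tree, 'admin', 'the administrator', {ignore: ['link']})
```

After the call, the first text node is split around a new text node reading `the administrator`, while the text inside the `link` node is left untouched.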
+ +## Security + +Use of `mdast-util-find-and-replace` does not involve [**hast**][hast] or user +content so there are no openings for [cross-site scripting (XSS)][xss] attacks. + +## Related + +* [`hast-util-find-and-replace`](https://github.com/syntax-tree/hast-util-find-and-replace) + — hast utility to find and replace text +* [`unist-util-select`](https://github.com/syntax-tree/unist-util-select) + — select unist nodes with CSS-like selectors + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-find-and-replace/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-find-and-replace/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-find-and-replace.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-find-and-replace + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-find-and-replace.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-find-and-replace + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-find-and-replace.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-find-and-replace + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[hast]: https://github.com/syntax-tree/hast + +[mdast]: https://github.com/syntax-tree/mdast + +[node]: https://github.com/syntax-tree/mdast#ndoes + +[tree]: https://github.com/syntax-tree/unist#tree + +[preorder]: https://github.com/syntax-tree/unist#preorder + +[text]: https://github.com/syntax-tree/mdast#text + +[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting + +[test]: https://github.com/syntax-tree/unist-util-is#api diff --git a/node_modules/mdast-util-footnote/from-markdown.js b/node_modules/mdast-util-footnote/from-markdown.js new file mode 100644 index 00000000..39f83d7b --- /dev/null +++ b/node_modules/mdast-util-footnote/from-markdown.js @@ -0,0 +1,69 @@ +var normalizeIdentifier = require('micromark/dist/util/normalize-identifier') + +exports.canContainEols = ['footnote'] + +exports.enter = { + footnoteDefinition: enterFootnoteDefinition, + footnoteDefinitionLabelString: enterFootnoteDefinitionLabelString, + footnoteCall: enterFootnoteCall, + footnoteCallString: enterFootnoteCallString, + inlineNote: enterNote +} +exports.exit = { + footnoteDefinition: exitFootnoteDefinition, + footnoteDefinitionLabelString: exitFootnoteDefinitionLabelString, + footnoteCall: exitFootnoteCall, + footnoteCallString: exitFootnoteCallString, + inlineNote: exitNote +} + +function enterFootnoteDefinition(token) { + this.enter( + {type: 
'footnoteDefinition', identifier: '', label: '', children: []}, + token + ) +} + +function enterFootnoteDefinitionLabelString() { + this.buffer() +} + +function exitFootnoteDefinitionLabelString(token) { + var label = this.resume() + this.stack[this.stack.length - 1].label = label + this.stack[this.stack.length - 1].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() +} + +function exitFootnoteDefinition(token) { + this.exit(token) +} + +function enterFootnoteCall(token) { + this.enter({type: 'footnoteReference', identifier: '', label: ''}, token) +} + +function enterFootnoteCallString() { + this.buffer() +} + +function exitFootnoteCallString(token) { + var label = this.resume() + this.stack[this.stack.length - 1].label = label + this.stack[this.stack.length - 1].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() +} + +function exitFootnoteCall(token) { + this.exit(token) +} + +function enterNote(token) { + this.enter({type: 'footnote', children: []}, token) +} + +function exitNote(token) { + this.exit(token) +} diff --git a/node_modules/mdast-util-footnote/index.js b/node_modules/mdast-util-footnote/index.js new file mode 100644 index 00000000..6da28c60 --- /dev/null +++ b/node_modules/mdast-util-footnote/index.js @@ -0,0 +1,2 @@ +exports.fromMarkdown = require('./from-markdown') +exports.toMarkdown = require('./to-markdown') diff --git a/node_modules/unist-util-remove-position/license b/node_modules/mdast-util-footnote/license similarity index 94% rename from node_modules/unist-util-remove-position/license rename to node_modules/mdast-util-footnote/license index 8d8660d3..39372356 100644 --- a/node_modules/unist-util-remove-position/license +++ b/node_modules/mdast-util-footnote/license @@ -1,6 +1,6 @@ (The MIT License) -Copyright (c) 2016 Titus Wormer +Copyright (c) 2020 Titus Wormer Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/node_modules/mdast-util-footnote/package.json b/node_modules/mdast-util-footnote/package.json new file mode 100644 index 00000000..9f0b8011 --- /dev/null +++ b/node_modules/mdast-util-footnote/package.json @@ -0,0 +1,76 @@ +{ + "name": "mdast-util-footnote", + "version": "0.1.7", + "description": "mdast extension to parse and serialize footnotes", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "footnote", + "note", + "pandoc" + ], + "repository": "syntax-tree/mdast-util-footnote", + "bugs": "https://github.com/syntax-tree/mdast-util-footnote/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "from-markdown.js", + "index.js", + "to-markdown.js" + ], + "dependencies": { + "mdast-util-to-markdown": "^0.6.0", + "micromark": "~2.11.0" + }, + "devDependencies": { + "mdast-util-from-markdown": "^0.8.0", + "micromark-extension-footnote": "~0.3.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.36.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-footnote/readme.md b/node_modules/mdast-util-footnote/readme.md new file mode 100644 index 00000000..ac93d026 --- /dev/null +++ b/node_modules/mdast-util-footnote/readme.md @@ -0,0 +1,270 @@ +# mdast-util-footnote + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +Extension for [`mdast-util-from-markdown`][from-markdown] and/or +[`mdast-util-to-markdown`][to-markdown] to support footnotes in **[mdast][]**. +When parsing (`from-markdown`), must be combined with +[`micromark-extension-footnote`][extension]. + +You probably shouldn’t use this package directly, but instead use +[`remark-footnotes`][remark-footnotes] with **[remark][]**. + +## Install + +[npm][]: + +```sh +npm install mdast-util-footnote +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown +Here is a footnote call,[^1] and another.[^longnote] + +[^1]: Here is the footnote. + +[^longnote]: Here’s one with multiple blocks. + + Subsequent paragraphs are indented to show that they +belong to the previous footnote. + + { some.code } + + The whole paragraph can be indented, or just the first + line. In this way, multi-paragraph footnotes work like + multi-paragraph list items. + +This paragraph won’t be part of the note, because it +isn’t indented. + +Here is an inline note.^[Inlines notes are easier to write, since +you don’t have to pick an identifier and move down to type the +note.] 
+``` + +And our script, `example.js`, looks as follows: + +```js +var fs = require('fs') +var fromMarkdown = require('mdast-util-from-markdown') +var toMarkdown = require('mdast-util-to-markdown') +var syntax = require('micromark-extension-footnote') +var footnote = require('mdast-util-footnote') + +var doc = fs.readFileSync('example.md') + +var tree = fromMarkdown(doc, { + extensions: [syntax({inlineNotes: true})], + mdastExtensions: [footnote.fromMarkdown] +}) + +console.log(tree) + +var out = toMarkdown(tree, {extensions: [footnote.toMarkdown]}) + +console.log(out) +``` + +Now, running `node example` yields: + +```js +{ + type: 'root', + children: [ + { + type: 'paragraph', + children: [ + {type: 'text', value: 'Here is a footnote call,'}, + {type: 'footnoteReference', identifier: '1', label: '1'}, + {type: 'text', value: ' and another.'}, + {type: 'footnoteReference', identifier: 'longnote', label: 'longnote'} + ] + }, + { + type: 'footnoteDefinition', + identifier: '1', + label: '1', + children: [ + { + type: 'paragraph', + children: [{type: 'text', value: 'Here is the footnote.'}] + } + ] + }, + { + type: 'footnoteDefinition', + identifier: 'longnote', + label: 'longnote', + children: [ + { + type: 'paragraph', + children: [{type: 'text', value: 'Here’s one with multiple blocks.'}] + }, + { + type: 'paragraph', + children: [ + {type: 'text', value: 'Subsequent paragraphs are indented to show that they\nbelong to the previous footnote.'} + ] + }, + {type: 'code', value: '{ some.code }'}, + { + type: 'paragraph', + children: [ + {type: 'text', value: 'The whole paragraph can be indented, or just the first\nline. In this way, multi-paragraph footnotes work like\nmulti-paragraph list items.'} + ] + } + ] + }, + { + type: 'paragraph', + children: [ + {type: 'text', value: 'This paragraph won’t be part of the note, because it\nisn’t indented.'} + ] + }, + { + type: 'paragraph', + children: [ + {type: 'text', value: 'Here is an inline note.'}, + { + type: 'footnote', + children: [ + {type: 'text', value: 'Inlines notes are easier to write, since\nyou don’t have to pick an identifier and move down to type the\nnote.'} + ] + } + ] + } + ] +} +``` + +```markdown +Here is a footnote call,[^1] and another.[^longnote] + +[^1]: Here is the footnote. + +[^longnote]: Here’s one with multiple blocks. + + Subsequent paragraphs are indented to show that they + belong to the previous footnote. + + { some.code } + + The whole paragraph can be indented, or just the first + line. In this way, multi-paragraph footnotes work like + multi-paragraph list items. + +This paragraph won’t be part of the note, because it +isn’t indented. + +Here is an inline note.^[Inlines notes are easier to write, since +you don’t have to pick an identifier and move down to type the +note.] +``` + +## API + +### `footnote.fromMarkdown` + +### `footnote.toMarkdown` + +> Note: the separate extensions are also available at +> `mdast-util-footnote/from-markdown` and +> `mdast-util-footnote/to-markdown`. + +Support footnotes. +These exports are extensions, respectively for +[`mdast-util-from-markdown`][from-markdown] and +[`mdast-util-to-markdown`][to-markdown]. 
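+
+For instance, a small sketch using those deep requires directly (equivalent
+to the `Use` example above):
+
+```js
+var fromMarkdown = require('mdast-util-from-markdown')
+var toMarkdown = require('mdast-util-to-markdown')
+var syntax = require('micromark-extension-footnote')
+var footnoteFromMarkdown = require('mdast-util-footnote/from-markdown')
+var footnoteToMarkdown = require('mdast-util-footnote/to-markdown')
+
+var tree = fromMarkdown('A call.[^1]\n\n[^1]: A note.', {
+  extensions: [syntax()],
+  mdastExtensions: [footnoteFromMarkdown]
+})
+
+console.log(toMarkdown(tree, {extensions: [footnoteToMarkdown]}))
+```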
+ +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`remarkjs/remark-footnotes`][remark-footnotes] + — remark plugin to support footnotes +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`micromark/micromark-extension-footnote`][extension] + — micromark extension to parse footnotes +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-footnote/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-footnote/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-footnote.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-footnote + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-footnote.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-footnote + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-footnote.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-footnote + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[remark]: https://github.com/remarkjs/remark + +[remark-footnotes]: https://github.com/remarkjs/remark-footnotes + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[micromark]: https://github.com/micromark/micromark + +[extension]: https://github.com/micromark/micromark-extension-footnote diff --git a/node_modules/mdast-util-footnote/to-markdown.js b/node_modules/mdast-util-footnote/to-markdown.js new file mode 100644 index 00000000..f4260089 --- /dev/null +++ b/node_modules/mdast-util-footnote/to-markdown.js @@ -0,0 +1,66 @@ +exports.unsafe = [ + // This is on by default already. 
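+  // Escaping `[` in these constructs keeps literal brackets in serialized
+  // text from being read back as the start of a (footnote) call or label.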
+ {character: '[', inConstruct: ['phrasing', 'label', 'reference']} +] +exports.handlers = { + footnote: footnote, + footnoteDefinition: footnoteDefinition, + footnoteReference: footnoteReference +} + +var association = require('mdast-util-to-markdown/lib/util/association') +var phrasing = require('mdast-util-to-markdown/lib/util/container-phrasing') +var flow = require('mdast-util-to-markdown/lib/util/container-flow') +var indentLines = require('mdast-util-to-markdown/lib/util/indent-lines') +var safe = require('mdast-util-to-markdown/lib/util/safe') + +footnoteReference.peek = footnoteReferencePeek +footnote.peek = footnotePeek + +function footnoteReference(node, _, context) { + var exit = context.enter('footnoteReference') + var subexit = context.enter('reference') + var reference = safe(context, association(node), {before: '^', after: ']'}) + subexit() + exit() + return '[^' + reference + ']' +} + +function footnoteReferencePeek() { + return '[' +} + +function footnote(node, _, context) { + var exit = context.enter('footnote') + var subexit = context.enter('label') + var value = '^[' + phrasing(node, context, {before: '[', after: ']'}) + ']' + subexit() + exit() + return value +} + +function footnotePeek() { + return '^' +} + +function footnoteDefinition(node, _, context) { + var exit = context.enter('footnoteDefinition') + var subexit = context.enter('label') + var label = + '[^' + safe(context, association(node), {before: '^', after: ']'}) + ']:' + var value + subexit() + + value = indentLines(flow(node, context), map) + exit() + + return value + + function map(line, index, blank) { + if (index) { + return (blank ? '' : ' ') + line + } + + return (blank ? label : label + ' ') + line + } +} diff --git a/node_modules/mdast-util-from-markdown/dist/index.js b/node_modules/mdast-util-from-markdown/dist/index.js new file mode 100644 index 00000000..c34f023f --- /dev/null +++ b/node_modules/mdast-util-from-markdown/dist/index.js @@ -0,0 +1,823 @@ +'use strict' + +module.exports = fromMarkdown + +// These three are compiled away in the `dist/` + +var toString = require('mdast-util-to-string') +var assign = require('micromark/dist/constant/assign') +var own = require('micromark/dist/constant/has-own-property') +var normalizeIdentifier = require('micromark/dist/util/normalize-identifier') +var safeFromInt = require('micromark/dist/util/safe-from-int') +var parser = require('micromark/dist/parse') +var preprocessor = require('micromark/dist/preprocess') +var postprocess = require('micromark/dist/postprocess') +var decode = require('parse-entities/decode-entity') +var stringifyPosition = require('unist-util-stringify-position') + +function fromMarkdown(value, encoding, options) { + if (typeof encoding !== 'string') { + options = encoding + encoding = undefined + } + + return compiler(options)( + postprocess( + parser(options).document().write(preprocessor()(value, encoding, true)) + ) + ) +} + +// Note this compiler only understand complete buffering, not streaming. 
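+// The compiler makes two passes over the event list: `prepareList` first
+// rewrites list events (inserting `listItem` tokens and inferring spread),
+// then every event is dispatched to its `enter`/`exit` handler, which builds
+// the mdast tree by pushing nodes onto and popping them off a stack.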
+function compiler(options) { + var settings = options || {} + var config = configure( + { + transforms: [], + canContainEols: [ + 'emphasis', + 'fragment', + 'heading', + 'paragraph', + 'strong' + ], + + enter: { + autolink: opener(link), + autolinkProtocol: onenterdata, + autolinkEmail: onenterdata, + atxHeading: opener(heading), + blockQuote: opener(blockQuote), + characterEscape: onenterdata, + characterReference: onenterdata, + codeFenced: opener(codeFlow), + codeFencedFenceInfo: buffer, + codeFencedFenceMeta: buffer, + codeIndented: opener(codeFlow, buffer), + codeText: opener(codeText, buffer), + codeTextData: onenterdata, + data: onenterdata, + codeFlowValue: onenterdata, + definition: opener(definition), + definitionDestinationString: buffer, + definitionLabelString: buffer, + definitionTitleString: buffer, + emphasis: opener(emphasis), + hardBreakEscape: opener(hardBreak), + hardBreakTrailing: opener(hardBreak), + htmlFlow: opener(html, buffer), + htmlFlowData: onenterdata, + htmlText: opener(html, buffer), + htmlTextData: onenterdata, + image: opener(image), + label: buffer, + link: opener(link), + listItem: opener(listItem), + listItemValue: onenterlistitemvalue, + listOrdered: opener(list, onenterlistordered), + listUnordered: opener(list), + paragraph: opener(paragraph), + reference: onenterreference, + referenceString: buffer, + resourceDestinationString: buffer, + resourceTitleString: buffer, + setextHeading: opener(heading), + strong: opener(strong), + thematicBreak: opener(thematicBreak) + }, + + exit: { + atxHeading: closer(), + atxHeadingSequence: onexitatxheadingsequence, + autolink: closer(), + autolinkEmail: onexitautolinkemail, + autolinkProtocol: onexitautolinkprotocol, + blockQuote: closer(), + characterEscapeValue: onexitdata, + characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker, + characterReferenceMarkerNumeric: onexitcharacterreferencemarker, + characterReferenceValue: onexitcharacterreferencevalue, + codeFenced: closer(onexitcodefenced), + codeFencedFence: onexitcodefencedfence, + codeFencedFenceInfo: onexitcodefencedfenceinfo, + codeFencedFenceMeta: onexitcodefencedfencemeta, + codeFlowValue: onexitdata, + codeIndented: closer(onexitcodeindented), + codeText: closer(onexitcodetext), + codeTextData: onexitdata, + data: onexitdata, + definition: closer(), + definitionDestinationString: onexitdefinitiondestinationstring, + definitionLabelString: onexitdefinitionlabelstring, + definitionTitleString: onexitdefinitiontitlestring, + emphasis: closer(), + hardBreakEscape: closer(onexithardbreak), + hardBreakTrailing: closer(onexithardbreak), + htmlFlow: closer(onexithtmlflow), + htmlFlowData: onexitdata, + htmlText: closer(onexithtmltext), + htmlTextData: onexitdata, + image: closer(onexitimage), + label: onexitlabel, + labelText: onexitlabeltext, + lineEnding: onexitlineending, + link: closer(onexitlink), + listItem: closer(), + listOrdered: closer(), + listUnordered: closer(), + paragraph: closer(), + referenceString: onexitreferencestring, + resourceDestinationString: onexitresourcedestinationstring, + resourceTitleString: onexitresourcetitlestring, + resource: onexitresource, + setextHeading: closer(onexitsetextheading), + setextHeadingLineSequence: onexitsetextheadinglinesequence, + setextHeadingText: onexitsetextheadingtext, + strong: closer(), + thematicBreak: closer() + } + }, + + settings.mdastExtensions || [] + ) + + var data = {} + + return compile + + function compile(events) { + var tree = {type: 'root', children: []} + var stack = 
[tree] + var tokenStack = [] + var listStack = [] + var index = -1 + var handler + var listStart + + var context = { + stack: stack, + tokenStack: tokenStack, + config: config, + enter: enter, + exit: exit, + buffer: buffer, + resume: resume, + setData: setData, + getData: getData + } + + while (++index < events.length) { + // We preprocess lists to add `listItem` tokens, and to infer whether + // items the list itself are spread out. + if ( + events[index][1].type === 'listOrdered' || + events[index][1].type === 'listUnordered' + ) { + if (events[index][0] === 'enter') { + listStack.push(index) + } else { + listStart = listStack.pop(index) + index = prepareList(events, listStart, index) + } + } + } + + index = -1 + + while (++index < events.length) { + handler = config[events[index][0]] + + if (own.call(handler, events[index][1].type)) { + handler[events[index][1].type].call( + assign({sliceSerialize: events[index][2].sliceSerialize}, context), + events[index][1] + ) + } + } + + if (tokenStack.length) { + throw new Error( + 'Cannot close document, a token (`' + + tokenStack[tokenStack.length - 1].type + + '`, ' + + stringifyPosition({ + start: tokenStack[tokenStack.length - 1].start, + end: tokenStack[tokenStack.length - 1].end + }) + + ') is still open' + ) + } + + // Figure out `root` position. + tree.position = { + start: point( + events.length ? events[0][1].start : {line: 1, column: 1, offset: 0} + ), + + end: point( + events.length + ? events[events.length - 2][1].end + : {line: 1, column: 1, offset: 0} + ) + } + + index = -1 + while (++index < config.transforms.length) { + tree = config.transforms[index](tree) || tree + } + + return tree + } + + function prepareList(events, start, length) { + var index = start - 1 + var containerBalance = -1 + var listSpread = false + var listItem + var tailIndex + var lineIndex + var tailEvent + var event + var firstBlankLineIndex + var atMarker + + while (++index <= length) { + event = events[index] + + if ( + event[1].type === 'listUnordered' || + event[1].type === 'listOrdered' || + event[1].type === 'blockQuote' + ) { + if (event[0] === 'enter') { + containerBalance++ + } else { + containerBalance-- + } + + atMarker = undefined + } else if (event[1].type === 'lineEndingBlank') { + if (event[0] === 'enter') { + if ( + listItem && + !atMarker && + !containerBalance && + !firstBlankLineIndex + ) { + firstBlankLineIndex = index + } + + atMarker = undefined + } + } else if ( + event[1].type === 'linePrefix' || + event[1].type === 'listItemValue' || + event[1].type === 'listItemMarker' || + event[1].type === 'listItemPrefix' || + event[1].type === 'listItemPrefixWhitespace' + ) { + // Empty. 
+ } else { + atMarker = undefined + } + + if ( + (!containerBalance && + event[0] === 'enter' && + event[1].type === 'listItemPrefix') || + (containerBalance === -1 && + event[0] === 'exit' && + (event[1].type === 'listUnordered' || + event[1].type === 'listOrdered')) + ) { + if (listItem) { + tailIndex = index + lineIndex = undefined + + while (tailIndex--) { + tailEvent = events[tailIndex] + + if ( + tailEvent[1].type === 'lineEnding' || + tailEvent[1].type === 'lineEndingBlank' + ) { + if (tailEvent[0] === 'exit') continue + + if (lineIndex) { + events[lineIndex][1].type = 'lineEndingBlank' + listSpread = true + } + + tailEvent[1].type = 'lineEnding' + lineIndex = tailIndex + } else if ( + tailEvent[1].type === 'linePrefix' || + tailEvent[1].type === 'blockQuotePrefix' || + tailEvent[1].type === 'blockQuotePrefixWhitespace' || + tailEvent[1].type === 'blockQuoteMarker' || + tailEvent[1].type === 'listItemIndent' + ) { + // Empty + } else { + break + } + } + + if ( + firstBlankLineIndex && + (!lineIndex || firstBlankLineIndex < lineIndex) + ) { + listItem._spread = true + } + + // Fix position. + listItem.end = point( + lineIndex ? events[lineIndex][1].start : event[1].end + ) + + events.splice(lineIndex || index, 0, ['exit', listItem, event[2]]) + index++ + length++ + } + + // Create a new list item. + if (event[1].type === 'listItemPrefix') { + listItem = { + type: 'listItem', + _spread: false, + start: point(event[1].start) + } + + events.splice(index, 0, ['enter', listItem, event[2]]) + index++ + length++ + firstBlankLineIndex = undefined + atMarker = true + } + } + } + + events[start][1]._spread = listSpread + return length + } + + function setData(key, value) { + data[key] = value + } + + function getData(key) { + return data[key] + } + + function point(d) { + return {line: d.line, column: d.column, offset: d.offset} + } + + function opener(create, and) { + return open + + function open(token) { + enter.call(this, create(token), token) + if (and) and.call(this, token) + } + } + + function buffer() { + this.stack.push({type: 'fragment', children: []}) + } + + function enter(node, token) { + this.stack[this.stack.length - 1].children.push(node) + this.stack.push(node) + this.tokenStack.push(token) + node.position = {start: point(token.start)} + return node + } + + function closer(and) { + return close + + function close(token) { + if (and) and.call(this, token) + exit.call(this, token) + } + } + + function exit(token) { + var node = this.stack.pop() + var open = this.tokenStack.pop() + + if (!open) { + throw new Error( + 'Cannot close `' + + token.type + + '` (' + + stringifyPosition({start: token.start, end: token.end}) + + '): it’s not open' + ) + } else if (open.type !== token.type) { + throw new Error( + 'Cannot close `' + + token.type + + '` (' + + stringifyPosition({start: token.start, end: token.end}) + + '): a different token (`' + + open.type + + '`, ' + + stringifyPosition({start: open.start, end: open.end}) + + ') is open' + ) + } + + node.position.end = point(token.end) + return node + } + + function resume() { + return toString(this.stack.pop()) + } + + // + // Handlers. 
+ // + + function onenterlistordered() { + setData('expectingFirstListItemValue', true) + } + + function onenterlistitemvalue(token) { + if (getData('expectingFirstListItemValue')) { + this.stack[this.stack.length - 2].start = parseInt( + this.sliceSerialize(token), + 10 + ) + + setData('expectingFirstListItemValue') + } + } + + function onexitcodefencedfenceinfo() { + var data = this.resume() + this.stack[this.stack.length - 1].lang = data + } + + function onexitcodefencedfencemeta() { + var data = this.resume() + this.stack[this.stack.length - 1].meta = data + } + + function onexitcodefencedfence() { + // Exit if this is the closing fence. + if (getData('flowCodeInside')) return + this.buffer() + setData('flowCodeInside', true) + } + + function onexitcodefenced() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data.replace( + /^(\r?\n|\r)|(\r?\n|\r)$/g, + '' + ) + + setData('flowCodeInside') + } + + function onexitcodeindented() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexitdefinitionlabelstring(token) { + // Discard label, use the source content instead. + var label = this.resume() + this.stack[this.stack.length - 1].label = label + this.stack[this.stack.length - 1].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() + } + + function onexitdefinitiontitlestring() { + var data = this.resume() + this.stack[this.stack.length - 1].title = data + } + + function onexitdefinitiondestinationstring() { + var data = this.resume() + this.stack[this.stack.length - 1].url = data + } + + function onexitatxheadingsequence(token) { + if (!this.stack[this.stack.length - 1].depth) { + this.stack[this.stack.length - 1].depth = this.sliceSerialize( + token + ).length + } + } + + function onexitsetextheadingtext() { + setData('setextHeadingSlurpLineEnding', true) + } + + function onexitsetextheadinglinesequence(token) { + this.stack[this.stack.length - 1].depth = + this.sliceSerialize(token).charCodeAt(0) === 61 ? 1 : 2 + } + + function onexitsetextheading() { + setData('setextHeadingSlurpLineEnding') + } + + function onenterdata(token) { + var siblings = this.stack[this.stack.length - 1].children + var tail = siblings[siblings.length - 1] + + if (!tail || tail.type !== 'text') { + // Add a new text node. + tail = text() + tail.position = {start: point(token.start)} + this.stack[this.stack.length - 1].children.push(tail) + } + + this.stack.push(tail) + } + + function onexitdata(token) { + var tail = this.stack.pop() + tail.value += this.sliceSerialize(token) + tail.position.end = point(token.end) + } + + function onexitlineending(token) { + var context = this.stack[this.stack.length - 1] + + // If we’re at a hard break, include the line ending in there. 
+ if (getData('atHardBreak')) { + context.children[context.children.length - 1].position.end = point( + token.end + ) + + setData('atHardBreak') + return + } + + if ( + !getData('setextHeadingSlurpLineEnding') && + config.canContainEols.indexOf(context.type) > -1 + ) { + onenterdata.call(this, token) + onexitdata.call(this, token) + } + } + + function onexithardbreak() { + setData('atHardBreak', true) + } + + function onexithtmlflow() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexithtmltext() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexitcodetext() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexitlink() { + var context = this.stack[this.stack.length - 1] + + // To do: clean. + if (getData('inReference')) { + context.type += 'Reference' + context.referenceType = getData('referenceType') || 'shortcut' + delete context.url + delete context.title + } else { + delete context.identifier + delete context.label + delete context.referenceType + } + + setData('referenceType') + } + + function onexitimage() { + var context = this.stack[this.stack.length - 1] + + // To do: clean. + if (getData('inReference')) { + context.type += 'Reference' + context.referenceType = getData('referenceType') || 'shortcut' + delete context.url + delete context.title + } else { + delete context.identifier + delete context.label + delete context.referenceType + } + + setData('referenceType') + } + + function onexitlabeltext(token) { + this.stack[this.stack.length - 2].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() + } + + function onexitlabel() { + var fragment = this.stack[this.stack.length - 1] + var value = this.resume() + + this.stack[this.stack.length - 1].label = value + + // Assume a reference. + setData('inReference', true) + + if (this.stack[this.stack.length - 1].type === 'link') { + this.stack[this.stack.length - 1].children = fragment.children + } else { + this.stack[this.stack.length - 1].alt = value + } + } + + function onexitresourcedestinationstring() { + var data = this.resume() + this.stack[this.stack.length - 1].url = data + } + + function onexitresourcetitlestring() { + var data = this.resume() + this.stack[this.stack.length - 1].title = data + } + + function onexitresource() { + setData('inReference') + } + + function onenterreference() { + setData('referenceType', 'collapsed') + } + + function onexitreferencestring(token) { + var label = this.resume() + this.stack[this.stack.length - 1].label = label + this.stack[this.stack.length - 1].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() + setData('referenceType', 'full') + } + + function onexitcharacterreferencemarker(token) { + setData('characterReferenceType', token.type) + } + + function onexitcharacterreferencevalue(token) { + var data = this.sliceSerialize(token) + var type = getData('characterReferenceType') + var value + var tail + + if (type) { + value = safeFromInt( + data, + type === 'characterReferenceMarkerNumeric' ? 
10 : 16 + ) + + setData('characterReferenceType') + } else { + value = decode(data) + } + + tail = this.stack.pop() + tail.value += value + tail.position.end = point(token.end) + } + + function onexitautolinkprotocol(token) { + onexitdata.call(this, token) + this.stack[this.stack.length - 1].url = this.sliceSerialize(token) + } + + function onexitautolinkemail(token) { + onexitdata.call(this, token) + this.stack[this.stack.length - 1].url = + 'mailto:' + this.sliceSerialize(token) + } + + // + // Creaters. + // + + function blockQuote() { + return {type: 'blockquote', children: []} + } + + function codeFlow() { + return {type: 'code', lang: null, meta: null, value: ''} + } + + function codeText() { + return {type: 'inlineCode', value: ''} + } + + function definition() { + return { + type: 'definition', + identifier: '', + label: null, + title: null, + url: '' + } + } + + function emphasis() { + return {type: 'emphasis', children: []} + } + + function heading() { + return {type: 'heading', depth: undefined, children: []} + } + + function hardBreak() { + return {type: 'break'} + } + + function html() { + return {type: 'html', value: ''} + } + + function image() { + return {type: 'image', title: null, url: '', alt: null} + } + + function link() { + return {type: 'link', title: null, url: '', children: []} + } + + function list(token) { + return { + type: 'list', + ordered: token.type === 'listOrdered', + start: null, + spread: token._spread, + children: [] + } + } + + function listItem(token) { + return { + type: 'listItem', + spread: token._spread, + checked: null, + children: [] + } + } + + function paragraph() { + return {type: 'paragraph', children: []} + } + + function strong() { + return {type: 'strong', children: []} + } + + function text() { + return {type: 'text', value: ''} + } + + function thematicBreak() { + return {type: 'thematicBreak'} + } +} + +function configure(config, extensions) { + var index = -1 + + while (++index < extensions.length) { + extension(config, extensions[index]) + } + + return config +} + +function extension(config, extension) { + var key + var left + + for (key in extension) { + left = own.call(config, key) ? 
config[key] : (config[key] = {}) + + if (key === 'canContainEols' || key === 'transforms') { + config[key] = [].concat(left, extension[key]) + } else { + Object.assign(left, extension[key]) + } + } +} diff --git a/node_modules/mdast-util-from-markdown/index.js b/node_modules/mdast-util-from-markdown/index.js new file mode 100644 index 00000000..2b74f75a --- /dev/null +++ b/node_modules/mdast-util-from-markdown/index.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('./dist') diff --git a/node_modules/mdast-util-from-markdown/lib/index.js b/node_modules/mdast-util-from-markdown/lib/index.js new file mode 100644 index 00000000..1e2e7806 --- /dev/null +++ b/node_modules/mdast-util-from-markdown/lib/index.js @@ -0,0 +1,819 @@ +'use strict' + +module.exports = fromMarkdown + +// These three are compiled away in the `dist/` +var codes = require('micromark/dist/character/codes') +var constants = require('micromark/dist/constant/constants') +var types = require('micromark/dist/constant/types') + +var toString = require('mdast-util-to-string') +var assign = require('micromark/dist/constant/assign') +var own = require('micromark/dist/constant/has-own-property') +var normalizeIdentifier = require('micromark/dist/util/normalize-identifier') +var safeFromInt = require('micromark/dist/util/safe-from-int') +var parser = require('micromark/dist/parse') +var preprocessor = require('micromark/dist/preprocess') +var postprocess = require('micromark/dist/postprocess') +var decode = require('parse-entities/decode-entity') +var stringifyPosition = require('unist-util-stringify-position') + +function fromMarkdown(value, encoding, options) { + if (typeof encoding !== 'string') { + options = encoding + encoding = undefined + } + + return compiler(options)( + postprocess( + parser(options).document().write(preprocessor()(value, encoding, true)) + ) + ) +} + +// Note this compiler only understand complete buffering, not streaming. 
+function compiler(options) { + var settings = options || {} + var config = configure( + { + transforms: [], + canContainEols: [ + 'emphasis', + 'fragment', + 'heading', + 'paragraph', + 'strong' + ], + enter: { + autolink: opener(link), + autolinkProtocol: onenterdata, + autolinkEmail: onenterdata, + atxHeading: opener(heading), + blockQuote: opener(blockQuote), + characterEscape: onenterdata, + characterReference: onenterdata, + codeFenced: opener(codeFlow), + codeFencedFenceInfo: buffer, + codeFencedFenceMeta: buffer, + codeIndented: opener(codeFlow, buffer), + codeText: opener(codeText, buffer), + codeTextData: onenterdata, + data: onenterdata, + codeFlowValue: onenterdata, + definition: opener(definition), + definitionDestinationString: buffer, + definitionLabelString: buffer, + definitionTitleString: buffer, + emphasis: opener(emphasis), + hardBreakEscape: opener(hardBreak), + hardBreakTrailing: opener(hardBreak), + htmlFlow: opener(html, buffer), + htmlFlowData: onenterdata, + htmlText: opener(html, buffer), + htmlTextData: onenterdata, + image: opener(image), + label: buffer, + link: opener(link), + listItem: opener(listItem), + listItemValue: onenterlistitemvalue, + listOrdered: opener(list, onenterlistordered), + listUnordered: opener(list), + paragraph: opener(paragraph), + reference: onenterreference, + referenceString: buffer, + resourceDestinationString: buffer, + resourceTitleString: buffer, + setextHeading: opener(heading), + strong: opener(strong), + thematicBreak: opener(thematicBreak) + }, + exit: { + atxHeading: closer(), + atxHeadingSequence: onexitatxheadingsequence, + autolink: closer(), + autolinkEmail: onexitautolinkemail, + autolinkProtocol: onexitautolinkprotocol, + blockQuote: closer(), + characterEscapeValue: onexitdata, + characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker, + characterReferenceMarkerNumeric: onexitcharacterreferencemarker, + characterReferenceValue: onexitcharacterreferencevalue, + codeFenced: closer(onexitcodefenced), + codeFencedFence: onexitcodefencedfence, + codeFencedFenceInfo: onexitcodefencedfenceinfo, + codeFencedFenceMeta: onexitcodefencedfencemeta, + codeFlowValue: onexitdata, + codeIndented: closer(onexitcodeindented), + codeText: closer(onexitcodetext), + codeTextData: onexitdata, + data: onexitdata, + definition: closer(), + definitionDestinationString: onexitdefinitiondestinationstring, + definitionLabelString: onexitdefinitionlabelstring, + definitionTitleString: onexitdefinitiontitlestring, + emphasis: closer(), + hardBreakEscape: closer(onexithardbreak), + hardBreakTrailing: closer(onexithardbreak), + htmlFlow: closer(onexithtmlflow), + htmlFlowData: onexitdata, + htmlText: closer(onexithtmltext), + htmlTextData: onexitdata, + image: closer(onexitimage), + label: onexitlabel, + labelText: onexitlabeltext, + lineEnding: onexitlineending, + link: closer(onexitlink), + listItem: closer(), + listOrdered: closer(), + listUnordered: closer(), + paragraph: closer(), + referenceString: onexitreferencestring, + resourceDestinationString: onexitresourcedestinationstring, + resourceTitleString: onexitresourcetitlestring, + resource: onexitresource, + setextHeading: closer(onexitsetextheading), + setextHeadingLineSequence: onexitsetextheadinglinesequence, + setextHeadingText: onexitsetextheadingtext, + strong: closer(), + thematicBreak: closer() + } + }, + settings.mdastExtensions || [] + ) + + var data = {} + + return compile + + function compile(events) { + var tree = {type: 'root', children: []} + var stack = [tree] + 
var tokenStack = [] + var listStack = [] + var index = -1 + var handler + var listStart + + var context = { + stack: stack, + tokenStack: tokenStack, + config: config, + enter: enter, + exit: exit, + buffer: buffer, + resume: resume, + setData: setData, + getData: getData + } + + while (++index < events.length) { + // We preprocess lists to add `listItem` tokens, and to infer whether + // items the list itself are spread out. + if ( + events[index][1].type === types.listOrdered || + events[index][1].type === types.listUnordered + ) { + if (events[index][0] === 'enter') { + listStack.push(index) + } else { + listStart = listStack.pop(index) + index = prepareList(events, listStart, index) + } + } + } + + index = -1 + + while (++index < events.length) { + handler = config[events[index][0]] + + if (own.call(handler, events[index][1].type)) { + handler[events[index][1].type].call( + assign({sliceSerialize: events[index][2].sliceSerialize}, context), + events[index][1] + ) + } + } + + if (tokenStack.length) { + throw new Error( + 'Cannot close document, a token (`' + + tokenStack[tokenStack.length - 1].type + + '`, ' + + stringifyPosition({ + start: tokenStack[tokenStack.length - 1].start, + end: tokenStack[tokenStack.length - 1].end + }) + + ') is still open' + ) + } + + // Figure out `root` position. + tree.position = { + start: point( + events.length ? events[0][1].start : {line: 1, column: 1, offset: 0} + ), + end: point( + events.length + ? events[events.length - 2][1].end + : {line: 1, column: 1, offset: 0} + ) + } + + index = -1 + while (++index < config.transforms.length) { + tree = config.transforms[index](tree) || tree + } + + return tree + } + + function prepareList(events, start, length) { + var index = start - 1 + var containerBalance = -1 + var listSpread = false + var listItem + var tailIndex + var lineIndex + var tailEvent + var event + var firstBlankLineIndex + var atMarker + + while (++index <= length) { + event = events[index] + + if ( + event[1].type === types.listUnordered || + event[1].type === types.listOrdered || + event[1].type === types.blockQuote + ) { + if (event[0] === 'enter') { + containerBalance++ + } else { + containerBalance-- + } + + atMarker = undefined + } else if (event[1].type === types.lineEndingBlank) { + if (event[0] === 'enter') { + if ( + listItem && + !atMarker && + !containerBalance && + !firstBlankLineIndex + ) { + firstBlankLineIndex = index + } + + atMarker = undefined + } + } else if ( + event[1].type === types.linePrefix || + event[1].type === types.listItemValue || + event[1].type === types.listItemMarker || + event[1].type === types.listItemPrefix || + event[1].type === types.listItemPrefixWhitespace + ) { + // Empty. 
+ } else { + atMarker = undefined + } + + if ( + (!containerBalance && + event[0] === 'enter' && + event[1].type === types.listItemPrefix) || + (containerBalance === -1 && + event[0] === 'exit' && + (event[1].type === types.listUnordered || + event[1].type === types.listOrdered)) + ) { + if (listItem) { + tailIndex = index + lineIndex = undefined + + while (tailIndex--) { + tailEvent = events[tailIndex] + + if ( + tailEvent[1].type === types.lineEnding || + tailEvent[1].type === types.lineEndingBlank + ) { + if (tailEvent[0] === 'exit') continue + + if (lineIndex) { + events[lineIndex][1].type = types.lineEndingBlank + listSpread = true + } + + tailEvent[1].type = types.lineEnding + lineIndex = tailIndex + } else if ( + tailEvent[1].type === types.linePrefix || + tailEvent[1].type === types.blockQuotePrefix || + tailEvent[1].type === types.blockQuotePrefixWhitespace || + tailEvent[1].type === types.blockQuoteMarker || + tailEvent[1].type === types.listItemIndent + ) { + // Empty + } else { + break + } + } + + if ( + firstBlankLineIndex && + (!lineIndex || firstBlankLineIndex < lineIndex) + ) { + listItem._spread = true + } + + // Fix position. + listItem.end = point( + lineIndex ? events[lineIndex][1].start : event[1].end + ) + + events.splice(lineIndex || index, 0, ['exit', listItem, event[2]]) + index++ + length++ + } + + // Create a new list item. + if (event[1].type === types.listItemPrefix) { + listItem = { + type: 'listItem', + _spread: false, + start: point(event[1].start) + } + events.splice(index, 0, ['enter', listItem, event[2]]) + index++ + length++ + firstBlankLineIndex = undefined + atMarker = true + } + } + } + + events[start][1]._spread = listSpread + return length + } + + function setData(key, value) { + data[key] = value + } + + function getData(key) { + return data[key] + } + + function point(d) { + return {line: d.line, column: d.column, offset: d.offset} + } + + function opener(create, and) { + return open + + function open(token) { + enter.call(this, create(token), token) + if (and) and.call(this, token) + } + } + + function buffer() { + this.stack.push({type: 'fragment', children: []}) + } + + function enter(node, token) { + this.stack[this.stack.length - 1].children.push(node) + this.stack.push(node) + this.tokenStack.push(token) + node.position = {start: point(token.start)} + return node + } + + function closer(and) { + return close + + function close(token) { + if (and) and.call(this, token) + exit.call(this, token) + } + } + + function exit(token) { + var node = this.stack.pop() + var open = this.tokenStack.pop() + + if (!open) { + throw new Error( + 'Cannot close `' + + token.type + + '` (' + + stringifyPosition({start: token.start, end: token.end}) + + '): it’s not open' + ) + } else if (open.type !== token.type) { + throw new Error( + 'Cannot close `' + + token.type + + '` (' + + stringifyPosition({start: token.start, end: token.end}) + + '): a different token (`' + + open.type + + '`, ' + + stringifyPosition({start: open.start, end: open.end}) + + ') is open' + ) + } + + node.position.end = point(token.end) + return node + } + + function resume() { + return toString(this.stack.pop()) + } + + // + // Handlers. 
+ // + + function onenterlistordered() { + setData('expectingFirstListItemValue', true) + } + + function onenterlistitemvalue(token) { + if (getData('expectingFirstListItemValue')) { + this.stack[this.stack.length - 2].start = parseInt( + this.sliceSerialize(token), + constants.numericBaseDecimal + ) + setData('expectingFirstListItemValue') + } + } + + function onexitcodefencedfenceinfo() { + var data = this.resume() + this.stack[this.stack.length - 1].lang = data + } + + function onexitcodefencedfencemeta() { + var data = this.resume() + this.stack[this.stack.length - 1].meta = data + } + + function onexitcodefencedfence() { + // Exit if this is the closing fence. + if (getData('flowCodeInside')) return + this.buffer() + setData('flowCodeInside', true) + } + + function onexitcodefenced() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data.replace( + /^(\r?\n|\r)|(\r?\n|\r)$/g, + '' + ) + setData('flowCodeInside') + } + + function onexitcodeindented() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexitdefinitionlabelstring(token) { + // Discard label, use the source content instead. + var label = this.resume() + this.stack[this.stack.length - 1].label = label + this.stack[this.stack.length - 1].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() + } + + function onexitdefinitiontitlestring() { + var data = this.resume() + this.stack[this.stack.length - 1].title = data + } + + function onexitdefinitiondestinationstring() { + var data = this.resume() + this.stack[this.stack.length - 1].url = data + } + + function onexitatxheadingsequence(token) { + if (!this.stack[this.stack.length - 1].depth) { + this.stack[this.stack.length - 1].depth = this.sliceSerialize( + token + ).length + } + } + + function onexitsetextheadingtext() { + setData('setextHeadingSlurpLineEnding', true) + } + + function onexitsetextheadinglinesequence(token) { + this.stack[this.stack.length - 1].depth = + this.sliceSerialize(token).charCodeAt(0) === codes.equalsTo ? 1 : 2 + } + + function onexitsetextheading() { + setData('setextHeadingSlurpLineEnding') + } + + function onenterdata(token) { + var siblings = this.stack[this.stack.length - 1].children + var tail = siblings[siblings.length - 1] + + if (!tail || tail.type !== 'text') { + // Add a new text node. + tail = text() + tail.position = {start: point(token.start)} + this.stack[this.stack.length - 1].children.push(tail) + } + + this.stack.push(tail) + } + + function onexitdata(token) { + var tail = this.stack.pop() + tail.value += this.sliceSerialize(token) + tail.position.end = point(token.end) + } + + function onexitlineending(token) { + var context = this.stack[this.stack.length - 1] + + // If we’re at a hard break, include the line ending in there. 
+ if (getData('atHardBreak')) { + context.children[context.children.length - 1].position.end = point( + token.end + ) + setData('atHardBreak') + return + } + + if ( + !getData('setextHeadingSlurpLineEnding') && + config.canContainEols.indexOf(context.type) > -1 + ) { + onenterdata.call(this, token) + onexitdata.call(this, token) + } + } + + function onexithardbreak() { + setData('atHardBreak', true) + } + + function onexithtmlflow() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexithtmltext() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexitcodetext() { + var data = this.resume() + this.stack[this.stack.length - 1].value = data + } + + function onexitlink() { + var context = this.stack[this.stack.length - 1] + + // To do: clean. + if (getData('inReference')) { + context.type += 'Reference' + context.referenceType = getData('referenceType') || 'shortcut' + delete context.url + delete context.title + } else { + delete context.identifier + delete context.label + delete context.referenceType + } + + setData('referenceType') + } + + function onexitimage() { + var context = this.stack[this.stack.length - 1] + + // To do: clean. + if (getData('inReference')) { + context.type += 'Reference' + context.referenceType = getData('referenceType') || 'shortcut' + delete context.url + delete context.title + } else { + delete context.identifier + delete context.label + delete context.referenceType + } + + setData('referenceType') + } + + function onexitlabeltext(token) { + this.stack[this.stack.length - 2].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() + } + + function onexitlabel() { + var fragment = this.stack[this.stack.length - 1] + var value = this.resume() + + this.stack[this.stack.length - 1].label = value + + // Assume a reference. + setData('inReference', true) + + if (this.stack[this.stack.length - 1].type === 'link') { + this.stack[this.stack.length - 1].children = fragment.children + } else { + this.stack[this.stack.length - 1].alt = value + } + } + + function onexitresourcedestinationstring() { + var data = this.resume() + this.stack[this.stack.length - 1].url = data + } + + function onexitresourcetitlestring() { + var data = this.resume() + this.stack[this.stack.length - 1].title = data + } + + function onexitresource() { + setData('inReference') + } + + function onenterreference() { + setData('referenceType', 'collapsed') + } + + function onexitreferencestring(token) { + var label = this.resume() + this.stack[this.stack.length - 1].label = label + this.stack[this.stack.length - 1].identifier = normalizeIdentifier( + this.sliceSerialize(token) + ).toLowerCase() + setData('referenceType', 'full') + } + + function onexitcharacterreferencemarker(token) { + setData('characterReferenceType', token.type) + } + + function onexitcharacterreferencevalue(token) { + var data = this.sliceSerialize(token) + var type = getData('characterReferenceType') + var value + var tail + + if (type) { + value = safeFromInt( + data, + type === types.characterReferenceMarkerNumeric + ? 
constants.numericBaseDecimal + : constants.numericBaseHexadecimal + ) + setData('characterReferenceType') + } else { + value = decode(data) + } + + tail = this.stack.pop() + tail.value += value + tail.position.end = point(token.end) + } + + function onexitautolinkprotocol(token) { + onexitdata.call(this, token) + this.stack[this.stack.length - 1].url = this.sliceSerialize(token) + } + + function onexitautolinkemail(token) { + onexitdata.call(this, token) + this.stack[this.stack.length - 1].url = + 'mailto:' + this.sliceSerialize(token) + } + + // + // Creaters. + // + + function blockQuote() { + return {type: 'blockquote', children: []} + } + + function codeFlow() { + return {type: 'code', lang: null, meta: null, value: ''} + } + + function codeText() { + return {type: 'inlineCode', value: ''} + } + + function definition() { + return { + type: 'definition', + identifier: '', + label: null, + title: null, + url: '' + } + } + + function emphasis() { + return {type: 'emphasis', children: []} + } + + function heading() { + return {type: 'heading', depth: undefined, children: []} + } + + function hardBreak() { + return {type: 'break'} + } + + function html() { + return {type: 'html', value: ''} + } + + function image() { + return {type: 'image', title: null, url: '', alt: null} + } + + function link() { + return {type: 'link', title: null, url: '', children: []} + } + + function list(token) { + return { + type: 'list', + ordered: token.type === 'listOrdered', + start: null, + spread: token._spread, + children: [] + } + } + + function listItem(token) { + return { + type: 'listItem', + spread: token._spread, + checked: null, + children: [] + } + } + + function paragraph() { + return {type: 'paragraph', children: []} + } + + function strong() { + return {type: 'strong', children: []} + } + + function text() { + return {type: 'text', value: ''} + } + + function thematicBreak() { + return {type: 'thematicBreak'} + } +} + +function configure(config, extensions) { + var index = -1 + + while (++index < extensions.length) { + extension(config, extensions[index]) + } + + return config +} + +function extension(config, extension) { + var key + var left + + for (key in extension) { + left = own.call(config, key) ? 
config[key] : (config[key] = {}) + + if (key === 'canContainEols' || key === 'transforms') { + config[key] = [].concat(left, extension[key]) + } else { + Object.assign(left, extension[key]) + } + } +} diff --git a/node_modules/is-word-character/license b/node_modules/mdast-util-from-markdown/license similarity index 94% rename from node_modules/is-word-character/license rename to node_modules/mdast-util-from-markdown/license index 8d8660d3..39372356 100644 --- a/node_modules/is-word-character/license +++ b/node_modules/mdast-util-from-markdown/license @@ -1,6 +1,6 @@ (The MIT License) -Copyright (c) 2016 Titus Wormer +Copyright (c) 2020 Titus Wormer Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/node_modules/mdast-util-from-markdown/package.json b/node_modules/mdast-util-from-markdown/package.json new file mode 100644 index 00000000..b17e76df --- /dev/null +++ b/node_modules/mdast-util-from-markdown/package.json @@ -0,0 +1,109 @@ +{ + "name": "mdast-util-from-markdown", + "version": "0.8.5", + "description": "mdast utility to parse markdown", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "parse", + "syntax", + "tree", + "ast" + ], + "repository": "syntax-tree/mdast-util-from-markdown", + "bugs": "https://github.com/syntax-tree/mdast-util-from-markdown/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "dist/", + "lib/", + "index.js", + "types/index.d.ts" + ], + "types": "types", + "dependencies": { + "@types/mdast": "^3.0.0", + "mdast-util-to-string": "^2.0.0", + "micromark": "~2.11.0", + "parse-entities": "^2.0.0", + "unist-util-stringify-position": "^2.0.0" + }, + "devDependencies": { + "@babel/cli": "^7.0.0", + "@babel/core": "^7.0.0", + "babel-plugin-inline-constants": "^1.0.0", + "browserify": "^17.0.0", + "commonmark.json": "^0.29.0", + "dtslint": "^4.0.0", + "gzip-size-cli": "^4.0.0", + "hast-util-to-html": "^7.0.0", + "mdast-util-to-hast": "^10.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "rehype-parse": "^7.0.0", + "rehype-stringify": "^8.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "unified": "^9.0.0", + "xo": "^0.37.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "generate-dist": "babel lib/ --out-dir dist/ --quiet --retain-lines; prettier dist/ --loglevel error --write", + "generate-size": "browserify . 
-p tinyify -s mdast-util-from-markdown -o mdast-util-from-markdown.min.js; gzip-size mdast-util-from-markdown.min.js --raw", + "generate": "npm run generate-dist && npm run generate-size", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test-types": "dtslint types", + "test": "npm run format && npm run generate && npm run test-coverage && npm run test-types" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "complexity": "off", + "guard-for-in": "off", + "unicorn/explicit-length-check": "off", + "unicorn/no-array-callback-reference": "off", + "unicorn/prefer-includes": "off", + "unicorn/prefer-number-properties": "off", + "unicorn/prefer-optional-catch-binding": "off" + }, + "ignores": [ + "types/" + ] + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-from-markdown/readme.md b/node_modules/mdast-util-from-markdown/readme.md new file mode 100644 index 00000000..30362141 --- /dev/null +++ b/node_modules/mdast-util-from-markdown/readme.md @@ -0,0 +1,206 @@ +# mdast-util-from-markdown + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[mdast][]** utility to parse markdown. + +## Install + +[npm][]: + +```sh +npm install mdast-util-from-markdown +``` + +## Use + +Say we have the following markdown file, `example.md`: + +```markdown +## Hello, *World*! +``` + +And our script, `example.js`, looks as follows: + +```js +var fs = require('fs') +var fromMarkdown = require('mdast-util-from-markdown') + +var doc = fs.readFileSync('example.md') + +var tree = fromMarkdown(doc) + +console.log(tree) +``` + +Now, running `node example` yields (positional info removed for brevity): + +```js +{ + type: 'root', + children: [ + { + type: 'heading', + depth: 2, + children: [ + {type: 'text', value: 'Hello, '}, + { + type: 'emphasis', + children: [{type: 'text', value: 'World'}] + }, + {type: 'text', value: '!'} + ] + } + ] +} +``` + +## API + +### `fromMarkdown(doc[, encoding][, options])` + +Parse markdown to a **[mdast][]** tree. + +##### Parameters + +###### `doc` + +Value to parse (`string` or [`Buffer`][buffer]). + +###### `encoding` + +[Character encoding][encoding] to understand `doc` as when it’s a +[`Buffer`][buffer] (`string`, default: `'utf8'`). + +###### `options.extensions` + +Array of syntax extensions (`Array.`, default: `[]`). +Passed to [`micromark` as `extensions`][micromark-extensions]. + +###### `options.mdastExtensions` + +Array of mdast extensions (`Array.`, default: `[]`). + +##### Returns + +[`Root`][root]. 
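+
+As a sketch of how the two extension options fit together (using the footnote
+extensions; any pair from the list below is wired up the same way):
+
+```js
+var fromMarkdown = require('mdast-util-from-markdown')
+var syntax = require('micromark-extension-footnote')
+var footnote = require('mdast-util-footnote')
+
+var tree = fromMarkdown('A call.[^1]\n\n[^1]: A note.', {
+  extensions: [syntax()],
+  mdastExtensions: [footnote.fromMarkdown]
+})
+```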
+ +## List of extensions + +* [`syntax-tree/mdast-util-directive`](https://github.com/syntax-tree/mdast-util-directive) + — parse directives +* [`syntax-tree/mdast-util-footnote`](https://github.com/syntax-tree/mdast-util-footnote) + — parse footnotes +* [`syntax-tree/mdast-util-frontmatter`](https://github.com/syntax-tree/mdast-util-frontmatter) + — parse frontmatter (YAML, TOML, more) +* [`syntax-tree/mdast-util-gfm`](https://github.com/syntax-tree/mdast-util-gfm) + — parse GFM +* [`syntax-tree/mdast-util-gfm-autolink-literal`](https://github.com/syntax-tree/mdast-util-gfm-autolink-literal) + — parse GFM autolink literals +* [`syntax-tree/mdast-util-gfm-strikethrough`](https://github.com/syntax-tree/mdast-util-gfm-strikethrough) + — parse GFM strikethrough +* [`syntax-tree/mdast-util-gfm-table`](https://github.com/syntax-tree/mdast-util-gfm-table) + — parse GFM tables +* [`syntax-tree/mdast-util-gfm-task-list-item`](https://github.com/syntax-tree/mdast-util-gfm-task-list-item) + — parse GFM task list items +* [`syntax-tree/mdast-util-math`](https://github.com/syntax-tree/mdast-util-math) + — parse math +* [`syntax-tree/mdast-util-mdx`](https://github.com/syntax-tree/mdast-util-mdx) + — parse MDX or MDX.js +* [`syntax-tree/mdast-util-mdx-expression`](https://github.com/syntax-tree/mdast-util-mdx-expression) + — parse MDX or MDX.js expressions +* [`syntax-tree/mdast-util-mdx-jsx`](https://github.com/syntax-tree/mdast-util-mdx-jsx) + — parse MDX or MDX.js JSX +* [`syntax-tree/mdast-util-mdxjs-esm`](https://github.com/syntax-tree/mdast-util-mdxjs-esm) + — parse MDX.js ESM + +## Security + +As Markdown is sometimes used for HTML, and improper use of HTML can open you up +to a [cross-site scripting (XSS)][xss] attack, use of `mdast-util-from-markdown` +can also be unsafe. +When going to HTML, use this utility in combination with +[`hast-util-sanitize`][sanitize] to make the tree safe. + +## Related + +* [`micromark/micromark`](https://github.com/micromark/micromark) + — the smallest commonmark-compliant markdown parser that exists +* [`remarkjs/remark`](https://github.com/remarkjs/remark) + — markdown processor powered by plugins +* [`syntax-tree/mdast-util-to-markdown`](https://github.com/syntax-tree/mdast-util-to-markdown) + — serialize mdast to markdown + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. 
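+
+As a companion to the Security section above, a minimal sketch of the
+sanitize step (assuming `mdast-util-to-hast`, `hast-util-sanitize`, and
+`hast-util-to-html`, which are separate packages):
+
+```js
+var fromMarkdown = require('mdast-util-from-markdown')
+var toHast = require('mdast-util-to-hast')
+var sanitize = require('hast-util-sanitize')
+var toHtml = require('hast-util-to-html')
+
+var tree = fromMarkdown('[click me](javascript:alert(1))')
+
+// Sanitize the hast tree before serializing it to HTML; the default schema
+// drops unsafe URLs such as `javascript:`.
+console.log(toHtml(sanitize(toHast(tree))))
+```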
+
+## License
+
+[MIT][license] © [Titus Wormer][author]
+
+
+
+[build-badge]: https://github.com/syntax-tree/mdast-util-from-markdown/workflows/main/badge.svg
+
+[build]: https://github.com/syntax-tree/mdast-util-from-markdown/actions
+
+[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-from-markdown.svg
+
+[coverage]: https://codecov.io/github/syntax-tree/mdast-util-from-markdown
+
+[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-from-markdown.svg
+
+[downloads]: https://www.npmjs.com/package/mdast-util-from-markdown
+
+[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-from-markdown.svg
+
+[size]: https://bundlephobia.com/result?p=mdast-util-from-markdown
+
+[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg
+
+[backers-badge]: https://opencollective.com/unified/backers/badge.svg
+
+[collective]: https://opencollective.com/unified
+
+[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg
+
+[chat]: https://github.com/syntax-tree/unist/discussions
+
+[npm]: https://docs.npmjs.com/cli/install
+
+[license]: license
+
+[author]: https://wooorm.com
+
+[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md
+
+[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md
+
+[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md
+
+[mdast]: https://github.com/syntax-tree/mdast
+
+[root]: https://github.com/syntax-tree/mdast#root
+
+[encoding]: https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings
+
+[buffer]: https://nodejs.org/api/buffer.html
+
+[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting
+
+[sanitize]: https://github.com/syntax-tree/hast-util-sanitize
+
+[micromark-extensions]: https://github.com/micromark/micromark#optionsextensions
diff --git a/node_modules/mdast-util-from-markdown/types/index.d.ts b/node_modules/mdast-util-from-markdown/types/index.d.ts
new file mode 100644
index 00000000..cb858ad0
--- /dev/null
+++ b/node_modules/mdast-util-from-markdown/types/index.d.ts
@@ -0,0 +1,34 @@
+// Minimum TypeScript Version: 3.0
+import {
+  Buffer,
+  BufferEncoding,
+  SyntaxExtension,
+  Token
+} from 'micromark/dist/shared-types'
+import {Root} from 'mdast'
+import {Type} from 'micromark/dist/constant/types'
+
+export = fromMarkdown
+
+declare namespace fromMarkdown {
+  interface MdastExtension {
+    enter: Record<Type, (token: Token) => void>
+    exit: Record<Type, (token: Token) => void>
+  }
+
+  interface Options {
+    extensions?: SyntaxExtension[]
+    mdastExtensions?: MdastExtension[]
+  }
+}
+
+declare function fromMarkdown(
+  value: string | Buffer,
+  options?: fromMarkdown.Options
+): Root
+
+declare function fromMarkdown(
+  value: string | Buffer,
+  encoding?: BufferEncoding,
+  options?: fromMarkdown.Options
+): Root
diff --git a/node_modules/mdast-util-frontmatter/from-markdown.js b/node_modules/mdast-util-frontmatter/from-markdown.js
new file mode 100644
index 00000000..894505f7
--- /dev/null
+++ b/node_modules/mdast-util-frontmatter/from-markdown.js
@@ -0,0 +1,40 @@
+module.exports = createFromMarkdown
+
+var matters = require('micromark-extension-frontmatter/lib/matters')
+
+function createFromMarkdown(options) {
+  var settings = matters(options)
+  var length = settings.length
+  var index = -1
+  var enter = {}
+  var exit = {}
+  var matter
+
+  while (++index < length) {
+    matter = settings[index]
+    enter[matter.type] = opener(matter)
+    exit[matter.type] = close
+    exit[matter.type + 'Value'] = value
+  }
+
+  return {enter: enter, exit: exit}
+}
+
+function
opener(matter) { + return open + function open(token) { + this.enter({type: matter.type, value: ''}, token) + this.buffer() + } +} + +function close(token) { + var data = this.resume() + // Remove the initial and final eol. + this.exit(token).value = data.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, '') +} + +function value(token) { + this.config.enter.data.call(this, token) + this.config.exit.data.call(this, token) +} diff --git a/node_modules/mdast-util-frontmatter/index.js b/node_modules/mdast-util-frontmatter/index.js new file mode 100644 index 00000000..6da28c60 --- /dev/null +++ b/node_modules/mdast-util-frontmatter/index.js @@ -0,0 +1,2 @@ +exports.fromMarkdown = require('./from-markdown') +exports.toMarkdown = require('./to-markdown') diff --git a/node_modules/mdast-util-frontmatter/license b/node_modules/mdast-util-frontmatter/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/mdast-util-frontmatter/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mdast-util-frontmatter/package.json b/node_modules/mdast-util-frontmatter/package.json new file mode 100644 index 00000000..052135c0 --- /dev/null +++ b/node_modules/mdast-util-frontmatter/package.json @@ -0,0 +1,76 @@ +{ + "name": "mdast-util-frontmatter", + "version": "0.2.0", + "description": "mdast extension to parse and serialize frontmatter (YAML, TOML, etc)", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "frontmatter", + "yaml", + "toml", + "gfm" + ], + "repository": "syntax-tree/mdast-util-frontmatter", + "bugs": "https://github.com/syntax-tree/mdast-util-frontmatter/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "from-markdown.js", + "index.js", + "to-markdown.js" + ], + "dependencies": { + "micromark-extension-frontmatter": "^0.2.0" + }, + "devDependencies": { + "mdast-util-from-markdown": "^0.5.0", + "mdast-util-to-markdown": "^0.3.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^8.0.0", + "remark-preset-wooorm": "^7.0.0", + "tape": "^5.0.0", + "xo": "^0.33.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-frontmatter/readme.md b/node_modules/mdast-util-frontmatter/readme.md new file mode 100644 index 00000000..b3d0440a --- /dev/null +++ b/node_modules/mdast-util-frontmatter/readme.md @@ -0,0 +1,186 @@ +# mdast-util-frontmatter + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +Extension for [`mdast-util-from-markdown`][from-markdown] and/or +[`mdast-util-to-markdown`][to-markdown] to support frontmatter in **[mdast][]**. +When parsing (`from-markdown`), must be combined with +[`micromark-extension-frontmatter`][extension]. + +You probably shouldn’t use this package directly, but instead use +[`remark-frontmatter`][remark-frontmatter] with **[remark][]**. + +## Install + +[npm][]: + +```sh +npm install mdast-util-frontmatter +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown ++++ +title = "New Website" ++++ + +# Other markdown +``` + +And our script, `example.js`, looks as follows: + +```js +var fs = require('fs') +var fromMarkdown = require('mdast-util-from-markdown') +var toMarkdown = require('mdast-util-to-markdown') +var syntax = require('micromark-extension-frontmatter') +var frontmatter = require('mdast-util-frontmatter') + +var doc = fs.readFileSync('example.md') + +var tree = fromMarkdown(doc, { + extensions: [syntax(['yaml', 'toml'])], + mdastExtensions: [frontmatter.fromMarkdown(['yaml', 'toml'])] +}) + +console.log(tree) + +var out = toMarkdown({extensions: [frontmatter.toMarkdown(['yaml', 'toml'])]}) + +console.log(out) +``` + +Now, running `node example` yields: + +```js +{ + type: 'root', + children: [ + {type: 'toml', value: 'title = "New Website"'}, + { + type: 'heading', + depth: 1, + children: [{type: 'text', value: 'Other markdown'}] + } + ] +} +``` + +```markdown ++++ +title = "New Website" ++++ + +# Other markdown +``` + +## API + +### `frontmatter.fromMarkdown([options])` + +### `frontmatter.toMarkdown([options])` + +> Note: the separate extensions are also available at +> `mdast-util-frontmatter/from-markdown` and +> `mdast-util-frontmatter/to-markdown`. + +Support frontmatter (YAML, TOML, and more). +These functions can be called with options and return extensions, respectively +for [`mdast-util-from-markdown`][from-markdown] and +[`mdast-util-to-markdown`][to-markdown]. + +Options are the same as [`micromark-extension-frontmatter`][options]. 
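+
+As a sketch of what such options can look like, presets and custom matters can
+be mixed (the `jsonfm` matter below is hypothetical, shown only to illustrate
+the `type`/`fence` form):
+
+```js
+var frontmatter = require('mdast-util-frontmatter')
+
+// 'yaml' is a preset; the second entry is a hypothetical custom matter that
+// is fenced by `{` and `}` lines.
+var options = ['yaml', {type: 'jsonfm', fence: {open: '{', close: '}'}}]
+
+var fromMarkdownExtension = frontmatter.fromMarkdown(options)
+var toMarkdownExtension = frontmatter.toMarkdown(options)
+```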
+ +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`remarkjs/remark-frontmatter`][remark-frontmatter] + — remark plugin to support frontmatter +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`micromark/micromark-extension-frontmatter`][extension] + — micromark extension to parse frontmatter +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://img.shields.io/travis/syntax-tree/mdast-util-frontmatter.svg + +[build]: https://travis-ci.org/syntax-tree/mdast-util-frontmatter + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-frontmatter.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-frontmatter + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-frontmatter.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-frontmatter + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-frontmatter.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-frontmatter + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[remark]: https://github.com/remarkjs/remark + +[remark-frontmatter]: https://github.com/remarkjs/remark-frontmatter + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[micromark]: https://github.com/micromark/micromark + +[extension]: https://github.com/micromark/micromark-extension-frontmatter + +[options]: https://github.com/micromark/micromark-extension-frontmatter#options diff --git a/node_modules/mdast-util-frontmatter/to-markdown.js b/node_modules/mdast-util-frontmatter/to-markdown.js new file mode 100644 index 00000000..5392746d --- /dev/null +++ b/node_modules/mdast-util-frontmatter/to-markdown.js @@ -0,0 +1,46 @@ +module.exports = createToMarkdown + +var matters = require('micromark-extension-frontmatter/lib/matters') + +function createToMarkdown(options) { + var unsafe = [] + var handlers = {} + var settings = matters(options) + var length = settings.length + var index = -1 + var matter + + while (++index < length) { + matter = settings[index] + handlers[matter.type] = handler(matter) + unsafe.push({atBreak: true, character: fence(matter, 'open').charAt(0)}) + } + + return 
{unsafe: unsafe, handlers: handlers} +} + +function handler(matter) { + var open = fence(matter, 'open') + var close = fence(matter, 'close') + + return handle + + function handle(node) { + return open + (node.value ? '\n' + node.value : '') + '\n' + close + } +} + +function fence(matter, prop) { + var marker + + if (matter.marker) { + marker = pick(matter.marker, prop) + return marker + marker + marker + } + + return pick(matter.fence, prop) +} + +function pick(schema, prop) { + return typeof schema === 'string' ? schema : schema[prop] +} diff --git a/node_modules/mdast-util-gfm-autolink-literal/from-markdown.js b/node_modules/mdast-util-gfm-autolink-literal/from-markdown.js new file mode 100644 index 00000000..8a24bccd --- /dev/null +++ b/node_modules/mdast-util-gfm-autolink-literal/from-markdown.js @@ -0,0 +1,157 @@ +var ccount = require('ccount') +var findAndReplace = require('mdast-util-find-and-replace') +var unicodePunctuation = require('micromark/dist/character/unicode-punctuation') +var unicodeWhitespace = require('micromark/dist/character/unicode-whitespace') + +exports.transforms = [transformGfmAutolinkLiterals] +exports.enter = { + literalAutolink: enterLiteralAutolink, + literalAutolinkEmail: enterLiteralAutolinkValue, + literalAutolinkHttp: enterLiteralAutolinkValue, + literalAutolinkWww: enterLiteralAutolinkValue +} +exports.exit = { + literalAutolink: exitLiteralAutolink, + literalAutolinkEmail: exitLiteralAutolinkEmail, + literalAutolinkHttp: exitLiteralAutolinkHttp, + literalAutolinkWww: exitLiteralAutolinkWww +} + +function enterLiteralAutolink(token) { + this.enter({type: 'link', title: null, url: '', children: []}, token) +} + +function enterLiteralAutolinkValue(token) { + this.config.enter.autolinkProtocol.call(this, token) +} + +function exitLiteralAutolinkHttp(token) { + this.config.exit.autolinkProtocol.call(this, token) +} + +function exitLiteralAutolinkWww(token) { + this.config.exit.data.call(this, token) + this.stack[this.stack.length - 1].url = 'http://' + this.sliceSerialize(token) +} + +function exitLiteralAutolinkEmail(token) { + this.config.exit.autolinkEmail.call(this, token) +} + +function exitLiteralAutolink(token) { + this.exit(token) +} + +function transformGfmAutolinkLiterals(tree) { + findAndReplace( + tree, + [ + [/(https?:\/\/|www(?=\.))([-.\w]+)([^ \t\r\n]*)/i, findUrl], + [/([-.\w+]+)@([-\w]+(?:\.[-\w]+)+)/, findEmail] + ], + {ignore: ['link', 'linkReference']} + ) +} + +function findUrl($0, protocol, domain, path, match) { + var prefix = '' + var parts + var result + + // Not an expected previous character. + if (!previous(match)) { + return false + } + + // Treat `www` as part of the domain. + if (/^w/i.test(protocol)) { + domain = protocol + domain + protocol = '' + prefix = 'http://' + } + + if (!isCorrectDomain(domain)) { + return false + } + + parts = splitUrl(domain + path) + + if (!parts[0]) return false + + result = { + type: 'link', + title: null, + url: prefix + protocol + parts[0], + children: [{type: 'text', value: protocol + parts[0]}] + } + + if (parts[1]) { + result = [result, {type: 'text', value: parts[1]}] + } + + return result +} + +function findEmail($0, atext, label, match) { + // Not an expected previous character. 
+ if (!previous(match, true) || /[_-]$/.test(label)) { + return false + } + + return { + type: 'link', + title: null, + url: 'mailto:' + atext + '@' + label, + children: [{type: 'text', value: atext + '@' + label}] + } +} + +function isCorrectDomain(domain) { + var parts = domain.split('.') + + if ( + parts.length < 2 || + (parts[parts.length - 1] && + (/_/.test(parts[parts.length - 1]) || + !/[a-zA-Z\d]/.test(parts[parts.length - 1]))) || + (parts[parts.length - 2] && + (/_/.test(parts[parts.length - 2]) || + !/[a-zA-Z\d]/.test(parts[parts.length - 2]))) + ) { + return false + } + + return true +} + +function splitUrl(url) { + var trail = /[!"&'),.:;<>?\]}]+$/.exec(url) + var closingParenIndex + var openingParens + var closingParens + + if (trail) { + url = url.slice(0, trail.index) + trail = trail[0] + closingParenIndex = trail.indexOf(')') + openingParens = ccount(url, '(') + closingParens = ccount(url, ')') + + while (closingParenIndex !== -1 && openingParens > closingParens) { + url += trail.slice(0, closingParenIndex + 1) + trail = trail.slice(closingParenIndex + 1) + closingParenIndex = trail.indexOf(')') + closingParens++ + } + } + + return [url, trail] +} + +function previous(match, email) { + var code = match.input.charCodeAt(match.index - 1) + return ( + (code !== code || unicodeWhitespace(code) || unicodePunctuation(code)) && + (!email || code !== 47) + ) +} diff --git a/node_modules/mdast-util-gfm-autolink-literal/index.js b/node_modules/mdast-util-gfm-autolink-literal/index.js new file mode 100644 index 00000000..6da28c60 --- /dev/null +++ b/node_modules/mdast-util-gfm-autolink-literal/index.js @@ -0,0 +1,2 @@ +exports.fromMarkdown = require('./from-markdown') +exports.toMarkdown = require('./to-markdown') diff --git a/node_modules/mdast-util-gfm-autolink-literal/license b/node_modules/mdast-util-gfm-autolink-literal/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/mdast-util-gfm-autolink-literal/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/mdast-util-gfm-autolink-literal/package.json b/node_modules/mdast-util-gfm-autolink-literal/package.json new file mode 100644 index 00000000..a934c5b8 --- /dev/null +++ b/node_modules/mdast-util-gfm-autolink-literal/package.json @@ -0,0 +1,90 @@ +{ + "name": "mdast-util-gfm-autolink-literal", + "version": "0.1.3", + "description": "mdast extension to parse and serialize GFM autolink literals", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "autolink", + "auto", + "link", + "literal", + "url", + "raw", + "gfm" + ], + "repository": "syntax-tree/mdast-util-gfm-autolink-literal", + "bugs": "https://github.com/syntax-tree/mdast-util-gfm-autolink-literal/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "from-markdown.js", + "index.js", + "to-markdown.js" + ], + "dependencies": { + "ccount": "^1.0.0", + "mdast-util-find-and-replace": "^1.1.0", + "micromark": "^2.11.3" + }, + "devDependencies": { + "hast-util-to-html": "^7.0.0", + "mdast-util-from-markdown": "^0.8.5", + "mdast-util-to-hast": "^10.0.0", + "mdast-util-to-markdown": "^0.6.0", + "micromark-extension-gfm-autolink-literal": "^0.5.6", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.37.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "max-params": "off", + "no-self-compare": "off", + "unicorn/prefer-includes": "off", + "unicorn/prefer-optional-catch-binding": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-gfm-autolink-literal/readme.md b/node_modules/mdast-util-gfm-autolink-literal/readme.md new file mode 100644 index 00000000..503ecb21 --- /dev/null +++ b/node_modules/mdast-util-gfm-autolink-literal/readme.md @@ -0,0 +1,188 @@ +# mdast-util-gfm-autolink-literal + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +Extension for [`mdast-util-from-markdown`][from-markdown] and/or +[`mdast-util-to-markdown`][to-markdown] to support GitHub flavored markdown +autolink literals in **[mdast][]**. +When parsing (`from-markdown`), must be combined with +[`micromark-extension-gfm-autolink-literal`][extension]. + +You might want to use this package through [`remark-gfm`][remark-gfm] with +**[remark][]**. 
+
+## Install
+
+[npm][]:
+
+```sh
+npm install mdast-util-gfm-autolink-literal
+```
+
+## Use
+
+Say our script, `example.js`, looks as follows:
+
+```js
+var fromMarkdown = require('mdast-util-from-markdown')
+var toMarkdown = require('mdast-util-to-markdown')
+var syntax = require('micromark-extension-gfm-autolink-literal')
+var autolinkLiteral = require('mdast-util-gfm-autolink-literal')
+
+var doc = 'www.example.com, https://example.com, and contact@example.com.'
+
+var tree = fromMarkdown(doc, {
+  extensions: [syntax],
+  mdastExtensions: [autolinkLiteral.fromMarkdown]
+})
+
+console.log(tree)
+
+var out = toMarkdown(tree, {extensions: [autolinkLiteral.toMarkdown]})
+
+console.log(out)
+```
+
+Now, running `node example` yields:
+
+```js
+{
+  type: 'root',
+  children: [
+    {
+      type: 'paragraph',
+      children: [
+        {
+          type: 'link',
+          title: null,
+          url: 'http://www.example.com',
+          children: [{type: 'text', value: 'www.example.com'}]
+        },
+        {type: 'text', value: ', '},
+        {
+          type: 'link',
+          title: null,
+          url: 'https://example.com',
+          children: [{type: 'text', value: 'https://example.com'}]
+        },
+        {type: 'text', value: ', and '},
+        {
+          type: 'link',
+          title: null,
+          url: 'mailto:contact@example.com',
+          children: [{type: 'text', value: 'contact@example.com'}]
+        },
+        {type: 'text', value: '.'}
+      ]
+    }
+  ]
+}
+```
+
+```markdown
+[www.example.com](http://www.example.com), <https://example.com>, and <contact@example.com>.
+```
+
+## API
+
+### `autolinkLiteral.fromMarkdown`
+
+### `autolinkLiteral.toMarkdown`
+
+> Note: the separate extensions are also available at
+> `mdast-util-gfm-autolink-literal/from-markdown` and
+> `mdast-util-gfm-autolink-literal/to-markdown`.
+
+Support literal autolinks.
+The exports are extensions, respectively
+for [`mdast-util-from-markdown`][from-markdown] and
+[`mdast-util-to-markdown`][to-markdown].
+
+## Related
+
+* [`remarkjs/remark`][remark]
+  — markdown processor powered by plugins
+* [`remarkjs/remark-gfm`][remark-gfm]
+  — remark plugin to support GFM
+* [`micromark/micromark`][micromark]
+  — the smallest commonmark-compliant markdown parser that exists
+* [`micromark/micromark-extension-gfm-autolink-literal`][extension]
+  — micromark extension to parse GFM autolink literals
+* [`syntax-tree/mdast-util-from-markdown`][from-markdown]
+  — mdast parser using `micromark` to create mdast from markdown
+* [`syntax-tree/mdast-util-to-markdown`][to-markdown]
+  — mdast serializer to create markdown from mdast
+
+## Contribute
+
+See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get
+started.
+See [`support.md`][support] for ways to get help.
+
+This project has a [code of conduct][coc].
+By interacting with this repository, organization, or community you agree to
+abide by its terms.
+ +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-gfm-autolink-literal/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-gfm-autolink-literal/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-gfm-autolink-literal.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-gfm-autolink-literal + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-gfm-autolink-literal.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-gfm-autolink-literal + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-gfm-autolink-literal.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-gfm-autolink-literal + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[remark]: https://github.com/remarkjs/remark + +[remark-gfm]: https://github.com/remarkjs/remark-gfm + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[micromark]: https://github.com/micromark/micromark + +[extension]: https://github.com/micromark/micromark-extension-gfm-autolink-literal diff --git a/node_modules/mdast-util-gfm-autolink-literal/to-markdown.js b/node_modules/mdast-util-gfm-autolink-literal/to-markdown.js new file mode 100644 index 00000000..e43f2d07 --- /dev/null +++ b/node_modules/mdast-util-gfm-autolink-literal/to-markdown.js @@ -0,0 +1,26 @@ +var inConstruct = 'phrasing' +var notInConstruct = ['autolink', 'link', 'image', 'label'] + +exports.unsafe = [ + { + character: '@', + before: '[+\\-.\\w]', + after: '[\\-.\\w]', + inConstruct: inConstruct, + notInConstruct: notInConstruct + }, + { + character: '.', + before: '[Ww]', + after: '[\\-.\\w]', + inConstruct: inConstruct, + notInConstruct: notInConstruct + }, + { + character: ':', + before: '[ps]', + after: '\\/', + inConstruct: inConstruct, + notInConstruct: notInConstruct + } +] diff --git a/node_modules/mdast-util-gfm-strikethrough/from-markdown.js b/node_modules/mdast-util-gfm-strikethrough/from-markdown.js new file mode 100644 index 00000000..f1601250 --- /dev/null +++ b/node_modules/mdast-util-gfm-strikethrough/from-markdown.js @@ -0,0 +1,11 @@ +exports.canContainEols = ['delete'] +exports.enter = {strikethrough: enterStrikethrough} +exports.exit = {strikethrough: exitStrikethrough} + +function enterStrikethrough(token) { + this.enter({type: 'delete', children: []}, token) +} + +function exitStrikethrough(token) { + this.exit(token) +} diff --git a/node_modules/mdast-util-gfm-strikethrough/index.js b/node_modules/mdast-util-gfm-strikethrough/index.js new file mode 100644 index 00000000..6da28c60 --- /dev/null +++ b/node_modules/mdast-util-gfm-strikethrough/index.js @@ -0,0 +1,2 @@ +exports.fromMarkdown = require('./from-markdown') 
+exports.toMarkdown = require('./to-markdown') diff --git a/node_modules/mdast-util-gfm-strikethrough/license b/node_modules/mdast-util-gfm-strikethrough/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/mdast-util-gfm-strikethrough/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mdast-util-gfm-strikethrough/package.json b/node_modules/mdast-util-gfm-strikethrough/package.json new file mode 100644 index 00000000..3158d23c --- /dev/null +++ b/node_modules/mdast-util-gfm-strikethrough/package.json @@ -0,0 +1,80 @@ +{ + "name": "mdast-util-gfm-strikethrough", + "version": "0.2.3", + "description": "mdast extension to parse and serialize GFM strikethrough", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "strikethrough", + "strike", + "through", + "del", + "delete", + "deletion", + "gfm" + ], + "repository": "syntax-tree/mdast-util-gfm-strikethrough", + "bugs": "https://github.com/syntax-tree/mdast-util-gfm-strikethrough/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "from-markdown.js", + "index.js", + "to-markdown.js" + ], + "dependencies": { + "mdast-util-to-markdown": "^0.6.0" + }, + "devDependencies": { + "mdast-util-from-markdown": "^0.8.0", + "micromark-extension-gfm-strikethrough": "^0.6.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "unist-util-remove-position": "^3.0.0", + "xo": "^0.36.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-gfm-strikethrough/readme.md b/node_modules/mdast-util-gfm-strikethrough/readme.md new file mode 100644 index 00000000..cb0a0548 --- /dev/null +++ b/node_modules/mdast-util-gfm-strikethrough/readme.md @@ -0,0 +1,173 @@ +# mdast-util-gfm-strikethrough + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +Extension for [`mdast-util-from-markdown`][from-markdown] and/or +[`mdast-util-to-markdown`][to-markdown] to support GitHub flavored markdown +strikethrough (~~like this~~) in **[mdast][]**. +When parsing (`from-markdown`), must be combined with +[`micromark-extension-gfm-strikethrough`][extension]. + +You probably shouldn’t use this package directly, but instead use +[`remark-gfm`][remark-gfm] with **[remark][]**. + +## Install + +[npm][]: + +```sh +npm install mdast-util-gfm-strikethrough +``` + +## Use + +Say our script, `example.js`, looks as follows: + +```js +var fromMarkdown = require('mdast-util-from-markdown') +var toMarkdown = require('mdast-util-to-markdown') +var syntax = require('micromark-extension-gfm-strikethrough') +var strikethrough = require('mdast-util-gfm-strikethrough') + +var doc = '*Emphasis*, **importance**, and ~~strikethrough~~.' + +var tree = fromMarkdown(doc, { + extensions: [syntax()], + mdastExtensions: [strikethrough.fromMarkdown] +}) + +console.log(tree) + +var out = toMarkdown(tree, {extensions: [strikethrough.toMarkdown]}) + +console.log(out) +``` + +Now, running `node example` yields: + +```js +{ + type: 'root', + children: [ + { + type: 'paragraph', + children: [ + {type: 'emphasis', children: [{type: 'text', value: 'Emphasis'}]}, + {type: 'text', value: ', '}, + {type: 'strong', children: [{type: 'text', value: 'importance'}]}, + {type: 'text', value: ', and '}, + {type: 'delete', children: [{type: 'text', value: 'strikethrough'}]}, + {type: 'text', value: '.'} + ] + } + ] +} +``` + +```markdown +*Emphasis*, **importance**, and ~~strikethrough~~. +``` + +## API + +### `strikethrough.fromMarkdown` + +### `strikethrough.toMarkdown` + +> Note: the separate extensions are also available at +> `mdast-util-gfm-strikethrough/from-markdown` and +> `mdast-util-gfm-strikethrough/to-markdown`. + +Support strikethrough. +The exports are extensions, respectively +for [`mdast-util-from-markdown`][from-markdown] and +[`mdast-util-to-markdown`][to-markdown]. 
+ +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`remarkjs/remark-gfm`][remark-gfm] + — remark plugin to support GFM +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`micromark/micromark-extension-gfm-strikethrough`][extension] + — micromark extension to parse GFM strikethrough +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-gfm-strikethrough/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-gfm-strikethrough/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-gfm-strikethrough.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-gfm-strikethrough + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-gfm-strikethrough.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-gfm-strikethrough + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-gfm-strikethrough.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-gfm-strikethrough + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[remark]: https://github.com/remarkjs/remark + +[remark-gfm]: https://github.com/remarkjs/remark-gfm + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[micromark]: https://github.com/micromark/micromark + +[extension]: https://github.com/micromark/micromark-extension-gfm-strikethrough diff --git a/node_modules/mdast-util-gfm-strikethrough/to-markdown.js b/node_modules/mdast-util-gfm-strikethrough/to-markdown.js new file mode 100644 index 00000000..6c5d8f23 --- /dev/null +++ b/node_modules/mdast-util-gfm-strikethrough/to-markdown.js @@ -0,0 +1,17 @@ +var phrasing = require('mdast-util-to-markdown/lib/util/container-phrasing') + +exports.unsafe = [{character: '~', inConstruct: 'phrasing'}] +exports.handlers = {delete: handleDelete} + +handleDelete.peek = peekDelete + +function handleDelete(node, _, context) { + var exit = context.enter('emphasis') + var value = phrasing(node, context, {before: '~', after: '~'}) + exit() + return '~~' + value + '~~' +} + +function peekDelete() { + return '~' +} diff --git 
a/node_modules/mdast-util-gfm-table/from-markdown.js b/node_modules/mdast-util-gfm-table/from-markdown.js new file mode 100644 index 00000000..126a85e5 --- /dev/null +++ b/node_modules/mdast-util-gfm-table/from-markdown.js @@ -0,0 +1,53 @@ +exports.enter = { + table: enterTable, + tableData: enterCell, + tableHeader: enterCell, + tableRow: enterRow +} +exports.exit = { + codeText: exitCodeText, + table: exitTable, + tableData: exit, + tableHeader: exit, + tableRow: exit +} + +function enterTable(token) { + this.enter({type: 'table', align: token._align, children: []}, token) + this.setData('inTable', true) +} + +function exitTable(token) { + this.exit(token) + this.setData('inTable') +} + +function enterRow(token) { + this.enter({type: 'tableRow', children: []}, token) +} + +function exit(token) { + this.exit(token) +} + +function enterCell(token) { + this.enter({type: 'tableCell', children: []}, token) +} + +// Overwrite the default code text data handler to unescape escaped pipes when +// they are in tables. +function exitCodeText(token) { + var value = this.resume() + + if (this.getData('inTable')) { + value = value.replace(/\\([\\|])/g, replace) + } + + this.stack[this.stack.length - 1].value = value + this.exit(token) +} + +function replace($0, $1) { + // Pipes work, backslashes don’t (but can’t escape pipes). + return $1 === '|' ? $1 : $0 +} diff --git a/node_modules/mdast-util-gfm-table/index.js b/node_modules/mdast-util-gfm-table/index.js new file mode 100644 index 00000000..6da28c60 --- /dev/null +++ b/node_modules/mdast-util-gfm-table/index.js @@ -0,0 +1,2 @@ +exports.fromMarkdown = require('./from-markdown') +exports.toMarkdown = require('./to-markdown') diff --git a/node_modules/mdast-util-gfm-table/license b/node_modules/mdast-util-gfm-table/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/mdast-util-gfm-table/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/mdast-util-gfm-table/package.json b/node_modules/mdast-util-gfm-table/package.json new file mode 100644 index 00000000..10c9aa48 --- /dev/null +++ b/node_modules/mdast-util-gfm-table/package.json @@ -0,0 +1,84 @@ +{ + "name": "mdast-util-gfm-table", + "version": "0.1.6", + "description": "mdast extension to parse and serialize GFM tables", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "table", + "row", + "column", + "cell", + "tabular", + "gfm" + ], + "repository": "syntax-tree/mdast-util-gfm-table", + "bugs": "https://github.com/syntax-tree/mdast-util-gfm-table/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "from-markdown.js", + "index.js", + "to-markdown.js" + ], + "dependencies": { + "markdown-table": "^2.0.0", + "mdast-util-to-markdown": "~0.6.0" + }, + "devDependencies": { + "mdast-util-from-markdown": "^0.8.0", + "micromark-extension-gfm-table": "^0.4.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "string-width": "^4.0.0", + "tape": "^5.0.0", + "unist-util-remove-position": "^3.0.0", + "xo": "^0.37.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "unicorn/prefer-includes": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-gfm-table/readme.md b/node_modules/mdast-util-gfm-table/readme.md new file mode 100644 index 00000000..ab211a93 --- /dev/null +++ b/node_modules/mdast-util-gfm-table/readme.md @@ -0,0 +1,231 @@ +# mdast-util-gfm-table + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +Extension for [`mdast-util-from-markdown`][from-markdown] and/or +[`mdast-util-to-markdown`][to-markdown] to support GitHub flavored markdown +tables in **[mdast][]**. +When parsing (`from-markdown`), must be combined with +[`micromark-extension-gfm-table`][extension]. + +You probably shouldn’t use this package directly, but instead use +[`remark-gfm`][remark-gfm] with **[remark][]**. 
+ +## Install + +[npm][]: + +```sh +npm install mdast-util-gfm-table +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown +| a | b | c | d | +| - | :- | -: | :-: | +| e | f | +| g | h | i | j | k | +``` + +And our script, `example.js`, looks as follows: + +```js +var fs = require('fs') +var fromMarkdown = require('mdast-util-from-markdown') +var toMarkdown = require('mdast-util-to-markdown') +var syntax = require('micromark-extension-gfm-table') +var table = require('mdast-util-gfm-table') + +var doc = fs.readFileSync('example.md') + +var tree = fromMarkdown(doc, { + extensions: [syntax], + mdastExtensions: [table.fromMarkdown] +}) + +console.log(tree) + +var out = toMarkdown(tree, {extensions: [table.toMarkdown()]}) + +console.log(out) +``` + +Now, running `node example` yields (positional info removed for the sake of +brevity): + +```js +{ + type: 'root', + children: [ + { + type: 'table', + align: [null, 'left', 'right', 'center'], + children: [ + { + type: 'tableRow', + children: [ + {type: 'tableCell', children: [{type: 'text', value: 'a'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'b'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'c'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'd'}]} + ] + }, + { + type: 'tableRow', + children: [ + {type: 'tableCell', children: [{type: 'text', value: 'e'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'f'}]} + ] + }, + { + type: 'tableRow', + children: [ + {type: 'tableCell', children: [{type: 'text', value: 'g'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'h'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'i'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'j'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'k'}]} + ] + } + ] + } + ] +} +``` + +```markdown +| a | b | c | d | | +| - | :- | -: | :-: | - | +| e | f | | | | +| g | h | i | j | k | +``` + +## API + +### `table.fromMarkdown` + +### `table.toMarkdown(options?)` + +> Note: the separate extensions are also available at +> `mdast-util-gfm-table/from-markdown` and +> `mdast-util-gfm-table/to-markdown`. + +Support tables. +The exports of `fromMarkdown` is an extension for +[`mdast-util-from-markdown`][from-markdown]. +The export of `toMarkdown` is a function that can be called with options and +returns an extension for [`mdast-util-to-markdown`][to-markdown]. + +##### `options` + +###### `options.tableCellPadding` + +Create tables with a space between cell delimiters (`|`) and content (`boolean`, +default: `true`). + +###### `options.tablePipeAlign` + +Align the delimiters (`|`) between table cells so that they all align nicely and +form a grid (`boolean`, default: `true`). + +###### `options.stringLength` + +Function passed to [`markdown-table`][markdown-table] to detect the length of a +table cell (`Function`, default: [`s => s.length`][string-length]). +Used to pad tables. 
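+
+As a sketch of the `stringLength` option, `string-width` (listed in the dev
+dependencies here) can be passed so that emoji or other wide characters do not
+throw off the padding:
+
+```js
+var stringWidth = require('string-width')
+var toMarkdown = require('mdast-util-to-markdown')
+var table = require('mdast-util-gfm-table')
+
+var tree = {
+  type: 'table',
+  align: [null],
+  children: [
+    {
+      type: 'tableRow',
+      children: [{type: 'tableCell', children: [{type: 'text', value: '🤔'}]}]
+    }
+  ]
+}
+
+// Measure cells by visual width instead of code units when padding.
+console.log(
+  toMarkdown(tree, {extensions: [table.toMarkdown({stringLength: stringWidth})]})
+)
+```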
+ +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`remarkjs/remark-gfm`][remark-gfm] + — remark plugin to support GFM +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`micromark/micromark-extension-gfm-table`][extension] + — micromark extension to parse GFM tables +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-gfm-table/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-gfm-table/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-gfm-table.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-gfm-table + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-gfm-table.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-gfm-table + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-gfm-table.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-gfm-table + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[remark]: https://github.com/remarkjs/remark + +[remark-gfm]: https://github.com/remarkjs/remark-gfm + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[micromark]: https://github.com/micromark/micromark + +[extension]: https://github.com/micromark/micromark-extension-gfm-table + +[markdown-table]: https://github.com/wooorm/markdown-table + +[string-length]: https://github.com/wooorm/markdown-table#optionsstringlength diff --git a/node_modules/mdast-util-gfm-table/to-markdown.js b/node_modules/mdast-util-gfm-table/to-markdown.js new file mode 100644 index 00000000..f9b3a43d --- /dev/null +++ b/node_modules/mdast-util-gfm-table/to-markdown.js @@ -0,0 +1,112 @@ +var phrasing = require('mdast-util-to-markdown/lib/util/container-phrasing') +var defaultInlineCode = require('mdast-util-to-markdown/lib/handle/inline-code') +var markdownTable = require('markdown-table') + +module.exports = toMarkdown + +function toMarkdown(options) { + var settings = options || {} + var padding = settings.tableCellPadding + var alignDelimiters = settings.tablePipeAlign + var stringLength = settings.stringLength + var around = padding ? 
' ' : '|' + + return { + unsafe: [ + {character: '\r', inConstruct: 'tableCell'}, + {character: '\n', inConstruct: 'tableCell'}, + // A pipe, when followed by a tab or space (padding), or a dash or colon + // (unpadded delimiter row), could result in a table. + {atBreak: true, character: '|', after: '[\t :-]'}, + // A pipe in a cell must be encoded. + {character: '|', inConstruct: 'tableCell'}, + // A colon must be followed by a dash, in which case it could start a + // delimiter row. + {atBreak: true, character: ':', after: '-'}, + // A delimiter row can also start with a dash, when followed by more + // dashes, a colon, or a pipe. + // This is a stricter version than the built in check for lists, thematic + // breaks, and setex heading underlines though: + // + {atBreak: true, character: '-', after: '[:|-]'} + ], + handlers: { + table: handleTable, + tableRow: handleTableRow, + tableCell: handleTableCell, + inlineCode: inlineCodeWithTable + } + } + + function handleTable(node, _, context) { + return serializeData(handleTableAsData(node, context), node.align) + } + + // This function isn’t really used normally, because we handle rows at the + // table level. + // But, if someone passes in a table row, this ensures we make somewhat sense. + function handleTableRow(node, _, context) { + var row = handleTableRowAsData(node, context) + // `markdown-table` will always add an align row + var value = serializeData([row]) + return value.slice(0, value.indexOf('\n')) + } + + function handleTableCell(node, _, context) { + var exit = context.enter('tableCell') + var value = phrasing(node, context, {before: around, after: around}) + exit() + return value + } + + function serializeData(matrix, align) { + return markdownTable(matrix, { + align: align, + alignDelimiters: alignDelimiters, + padding: padding, + stringLength: stringLength + }) + } + + function handleTableAsData(node, context) { + var children = node.children + var index = -1 + var length = children.length + var result = [] + var subexit = context.enter('table') + + while (++index < length) { + result[index] = handleTableRowAsData(children[index], context) + } + + subexit() + + return result + } + + function handleTableRowAsData(node, context) { + var children = node.children + var index = -1 + var length = children.length + var result = [] + var subexit = context.enter('tableRow') + + while (++index < length) { + result[index] = handleTableCell(children[index], node, context) + } + + subexit() + + return result + } + + function inlineCodeWithTable(node, parent, context) { + var value = defaultInlineCode(node, parent, context) + + if (context.stack.indexOf('tableCell') !== -1) { + value = value.replace(/\|/g, '\\$&') + } + + return value + } +} diff --git a/node_modules/mdast-util-gfm-task-list-item/from-markdown.js b/node_modules/mdast-util-gfm-task-list-item/from-markdown.js new file mode 100644 index 00000000..5aa03285 --- /dev/null +++ b/node_modules/mdast-util-gfm-task-list-item/from-markdown.js @@ -0,0 +1,50 @@ +exports.exit = { + taskListCheckValueChecked: exitCheck, + taskListCheckValueUnchecked: exitCheck, + paragraph: exitParagraphWithTaskListItem +} + +function exitCheck(token) { + // We’re always in a paragraph, in a list item. 
+ this.stack[this.stack.length - 2].checked = + token.type === 'taskListCheckValueChecked' +} + +function exitParagraphWithTaskListItem(token) { + var parent = this.stack[this.stack.length - 2] + var node = this.stack[this.stack.length - 1] + var siblings = parent.children + var head = node.children[0] + var index = -1 + var firstParaghraph + + if ( + parent && + parent.type === 'listItem' && + typeof parent.checked === 'boolean' && + head && + head.type === 'text' + ) { + while (++index < siblings.length) { + if (siblings[index].type === 'paragraph') { + firstParaghraph = siblings[index] + break + } + } + + if (firstParaghraph === node) { + // Must start with a space or a tab. + head.value = head.value.slice(1) + + if (head.value.length === 0) { + node.children.shift() + } else { + head.position.start.column++ + head.position.start.offset++ + node.position.start = Object.assign({}, head.position.start) + } + } + } + + this.exit(token) +} diff --git a/node_modules/mdast-util-gfm-task-list-item/index.js b/node_modules/mdast-util-gfm-task-list-item/index.js new file mode 100644 index 00000000..6da28c60 --- /dev/null +++ b/node_modules/mdast-util-gfm-task-list-item/index.js @@ -0,0 +1,2 @@ +exports.fromMarkdown = require('./from-markdown') +exports.toMarkdown = require('./to-markdown') diff --git a/node_modules/mdast-util-gfm-task-list-item/license b/node_modules/mdast-util-gfm-task-list-item/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/mdast-util-gfm-task-list-item/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/mdast-util-gfm-task-list-item/package.json b/node_modules/mdast-util-gfm-task-list-item/package.json new file mode 100644 index 00000000..d3b3615a --- /dev/null +++ b/node_modules/mdast-util-gfm-task-list-item/package.json @@ -0,0 +1,83 @@ +{ + "name": "mdast-util-gfm-task-list-item", + "version": "0.1.6", + "description": "mdast extension to parse and serialize GFM task list items", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "task", + "list", + "item", + "check", + "checkbox", + "todo", + "gfm" + ], + "repository": "syntax-tree/mdast-util-gfm-task-list-item", + "bugs": "https://github.com/syntax-tree/mdast-util-gfm-task-list-item/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "from-markdown.js", + "index.js", + "to-markdown.js" + ], + "dependencies": { + "mdast-util-to-markdown": "~0.6.0" + }, + "devDependencies": { + "mdast-util-from-markdown": "^0.8.0", + "micromark-extension-gfm-task-list-item": "^0.3.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0-alpha.1", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "unist-util-remove-position": "^3.0.0", + "xo": "^0.36.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "unicorn/prefer-includes": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-gfm-task-list-item/readme.md b/node_modules/mdast-util-gfm-task-list-item/readme.md new file mode 100644 index 00000000..faaed07c --- /dev/null +++ b/node_modules/mdast-util-gfm-task-list-item/readme.md @@ -0,0 +1,226 @@ +# mdast-util-gfm-task-list-item + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +Extension for [`mdast-util-from-markdown`][from-markdown] and/or +[`mdast-util-to-markdown`][to-markdown] to support GitHub flavored markdown +task list items in **[mdast][]**. +When parsing (`from-markdown`), must be combined with +[`micromark-extension-gfm-task-list-item`][extension]. + +You probably shouldn’t use this package directly, but instead use +[`remark-gfm`][remark-gfm] with **[remark][]**. + +## Install + +[npm][]: + +```sh +npm install mdast-util-gfm-task-list-item +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown +* [ ] To do +* [x] Done + +1. Mixed… +2. 
[x] …messages +``` + +And our script, `example.js`, looks as follows: + +```js +var fs = require('fs') +var fromMarkdown = require('mdast-util-from-markdown') +var toMarkdown = require('mdast-util-to-markdown') +var syntax = require('micromark-extension-gfm-task-list-item') +var taskListItem = require('mdast-util-gfm-task-list-item') + +var doc = fs.readFileSync('example.md') + +var tree = fromMarkdown(doc, { + extensions: [syntax], + mdastExtensions: [taskListItem.fromMarkdown] +}) + +console.log(tree) + +var out = toMarkdown(tree, {extensions: [taskListItem.toMarkdown]}) + +console.log(out) +``` + +Now, running `node example` yields (positional info removed for the sake of +brevity): + +```js +{ + type: 'root', + children: [ + { + type: 'list', + ordered: false, + start: null, + spread: false, + children: [ + { + type: 'listItem', + spread: false, + checked: false, + children: [ + {type: 'paragraph', children: [{type: 'text', value: 'To do'}]} + ] + }, + { + type: 'listItem', + spread: false, + checked: true, + children: [ + {type: 'paragraph', children: [{type: 'text', value: 'Done'}]} + ] + } + ] + }, + { + type: 'list', + ordered: true, + start: 1, + spread: false, + children: [ + { + type: 'listItem', + spread: false, + checked: null, + children: [ + {type: 'paragraph', children: [{type: 'text', value: 'Mixed…'}]} + ] + }, + { + type: 'listItem', + spread: false, + checked: true, + children: [ + {type: 'paragraph', children: [{type: 'text', value: '…messages'}]} + ] + } + ] + } + ] +} +``` + +```markdown +* [ ] To do +* [x] Done + +1. Mixed… +2. [x] …messages +``` + +## API + +### `taskListItem.fromMarkdown` + +### `taskListItem.toMarkdown` + +> Note: the separate extensions are also available at +> `mdast-util-gfm-task-list-item/from-markdown` and +> `mdast-util-gfm-task-list-item/to-markdown`. + +Support task list items. +The exports are extensions, respectively +for [`mdast-util-from-markdown`][from-markdown] and +[`mdast-util-to-markdown`][to-markdown]. + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`remarkjs/remark-gfm`][remark-gfm] + — remark plugin to support GFM +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`micromark/micromark-extension-gfm-task-list-item`][extension] + — micromark extension to parse GFM task list items +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. 
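+
+The deep entry points mentioned in the API section above can also be required
+directly; a minimal sketch of that (equivalent to using the `index.js`
+exports) might look like:
+
+```js
+var fromMarkdown = require('mdast-util-from-markdown')
+var toMarkdown = require('mdast-util-to-markdown')
+var syntax = require('micromark-extension-gfm-task-list-item')
+var fromMarkdownExt = require('mdast-util-gfm-task-list-item/from-markdown')
+var toMarkdownExt = require('mdast-util-gfm-task-list-item/to-markdown')
+
+var tree = fromMarkdown('* [x] done', {
+  extensions: [syntax],
+  mdastExtensions: [fromMarkdownExt]
+})
+
+console.log(toMarkdown(tree, {extensions: [toMarkdownExt]}))
+```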
+ +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-gfm-task-list-item/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-gfm-task-list-item/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-gfm-task-list-item.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-gfm-task-list-item + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-gfm-task-list-item.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-gfm-task-list-item + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-gfm-task-list-item.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-gfm-task-list-item + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[remark]: https://github.com/remarkjs/remark + +[remark-gfm]: https://github.com/remarkjs/remark-gfm + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[micromark]: https://github.com/micromark/micromark + +[extension]: https://github.com/micromark/micromark-extension-gfm-task-list-item diff --git a/node_modules/mdast-util-gfm-task-list-item/to-markdown.js b/node_modules/mdast-util-gfm-task-list-item/to-markdown.js new file mode 100644 index 00000000..4d07e5a0 --- /dev/null +++ b/node_modules/mdast-util-gfm-task-list-item/to-markdown.js @@ -0,0 +1,22 @@ +var defaultListItem = require('mdast-util-to-markdown/lib/handle/list-item') + +exports.unsafe = [{atBreak: true, character: '-', after: '[:|-]'}] + +exports.handlers = { + listItem: listItemWithTaskListItem +} + +function listItemWithTaskListItem(node, parent, context) { + var value = defaultListItem(node, parent, context) + var head = node.children[0] + + if (typeof node.checked === 'boolean' && head && head.type === 'paragraph') { + value = value.replace(/^(?:[*+-]|\d+\.)([\r\n]| {1,3})/, check) + } + + return value + + function check($0) { + return $0 + '[' + (node.checked ? 
'x' : ' ') + '] ' + } +} diff --git a/node_modules/mdast-util-gfm/from-markdown.js b/node_modules/mdast-util-gfm/from-markdown.js new file mode 100644 index 00000000..c367a563 --- /dev/null +++ b/node_modules/mdast-util-gfm/from-markdown.js @@ -0,0 +1,42 @@ +var autolinkLiteral = require('mdast-util-gfm-autolink-literal/from-markdown') +var strikethrough = require('mdast-util-gfm-strikethrough/from-markdown') +var table = require('mdast-util-gfm-table/from-markdown') +var taskListItem = require('mdast-util-gfm-task-list-item/from-markdown') + +var own = {}.hasOwnProperty + +module.exports = configure([ + autolinkLiteral, + strikethrough, + table, + taskListItem +]) + +function configure(extensions) { + var config = {transforms: [], canContainEols: []} + var length = extensions.length + var index = -1 + + while (++index < length) { + extension(config, extensions[index]) + } + + return config +} + +function extension(config, extension) { + var key + var left + var right + + for (key in extension) { + left = own.call(config, key) ? config[key] : (config[key] = {}) + right = extension[key] + + if (key === 'canContainEols' || key === 'transforms') { + config[key] = [].concat(left, right) + } else { + Object.assign(left, right) + } + } +} diff --git a/node_modules/mdast-util-gfm/index.js b/node_modules/mdast-util-gfm/index.js new file mode 100644 index 00000000..6da28c60 --- /dev/null +++ b/node_modules/mdast-util-gfm/index.js @@ -0,0 +1,2 @@ +exports.fromMarkdown = require('./from-markdown') +exports.toMarkdown = require('./to-markdown') diff --git a/node_modules/mdast-util-gfm/license b/node_modules/mdast-util-gfm/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/mdast-util-gfm/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/mdast-util-gfm/package.json b/node_modules/mdast-util-gfm/package.json new file mode 100644 index 00000000..b87bbfe4 --- /dev/null +++ b/node_modules/mdast-util-gfm/package.json @@ -0,0 +1,93 @@ +{ + "name": "mdast-util-gfm", + "version": "0.1.2", + "description": "mdast extension to parse and serialize GFM (GitHub Flavored Markdown)", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "table", + "strikethrough", + "tasklist", + "autolink", + "tagfilter", + "github", + "gfm", + "gfm" + ], + "repository": "syntax-tree/mdast-util-gfm", + "bugs": "https://github.com/syntax-tree/mdast-util-gfm/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "from-markdown.js", + "index.js", + "to-markdown.js" + ], + "dependencies": { + "mdast-util-gfm-autolink-literal": "^0.1.0", + "mdast-util-gfm-strikethrough": "^0.2.0", + "mdast-util-gfm-table": "^0.1.0", + "mdast-util-gfm-task-list-item": "^0.1.0", + "mdast-util-to-markdown": "^0.6.1" + }, + "devDependencies": { + "github-slugger": "^1.0.0", + "hast-util-to-html": "^7.0.0", + "mdast-util-from-markdown": "^0.8.0", + "mdast-util-to-hast": "^10.0.0", + "micromark-extension-gfm": "^0.3.0", + "node-fetch": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.37.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "crawl": "node script/crawl-tests", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "guard-for-in": "off", + "unicorn/prefer-optional-catch-binding": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-gfm/readme.md b/node_modules/mdast-util-gfm/readme.md new file mode 100644 index 00000000..8ddc9fa8 --- /dev/null +++ b/node_modules/mdast-util-gfm/readme.md @@ -0,0 +1,320 @@ +# mdast-util-gfm + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +Extension for [`mdast-util-from-markdown`][from-markdown] and/or +[`mdast-util-to-markdown`][to-markdown] to support GitHub flavored markdown in +**[mdast][]**. +When parsing (`from-markdown`), must be combined with +[`micromark-extension-gfm`][extension]. + +You probably shouldn’t use this package directly, but instead use +[`remark-gfm`][remark-gfm] with **[remark][]**. 
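+
+In practice that higher-level setup is usually just a couple of plugins; a
+minimal sketch (assuming `unified`, `remark-parse`, `remark-gfm`, and
+`remark-stringify` are installed; none of them ship with this package) could
+look like this:
+
+```js
+var unified = require('unified')
+var parse = require('remark-parse')
+var gfm = require('remark-gfm')
+var stringify = require('remark-stringify')
+
+unified()
+  .use(parse)
+  .use(gfm)
+  .use(stringify)
+  .process('* [x] GFM task list item', function (error, file) {
+    if (error) throw error
+    console.log(String(file))
+  })
+```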
+ +Alternatively, the extensions can be used separately: + +* [`syntax-tree/mdast-util-gfm-autolink-literal`](https://github.com/syntax-tree/mdast-util-gfm-autolink-literal) + — support GFM autolink literals +* [`syntax-tree/mdast-util-gfm-strikethrough`](https://github.com/syntax-tree/mdast-util-gfm-strikethrough) + — support GFM strikethrough +* [`syntax-tree/mdast-util-gfm-table`](https://github.com/syntax-tree/mdast-util-gfm-table) + — support GFM tables +* [`syntax-tree/mdast-util-gfm-task-list-item`](https://github.com/syntax-tree/mdast-util-gfm-task-list-item) + — support GFM tasklists + +## Install + +[npm][]: + +```sh +npm install mdast-util-gfm +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown +# GFM + +## Autolink literals + +www.example.com, https://example.com, and contact@example.com. + +## Strikethrough + +~one~ or ~~two~~ tildes. + +## Table + +| a | b | c | d | +| - | :- | -: | :-: | + +## Tasklist + +* [ ] to do +* [x] done +``` + +And our script, `example.js`, looks as follows: + +```js +var fs = require('fs') +var fromMarkdown = require('mdast-util-from-markdown') +var toMarkdown = require('mdast-util-to-markdown') +var syntax = require('micromark-extension-gfm') +var gfm = require('mdast-util-gfm') + +var doc = fs.readFileSync('example.md') + +var tree = fromMarkdown(doc, { + extensions: [syntax()], + mdastExtensions: [gfm.fromMarkdown] +}) + +console.log(tree) + +var out = toMarkdown(tree, {extensions: [gfm.toMarkdown()]}) + +console.log(out) +``` + +Now, running `node example` yields: + +```js +{ + type: 'root', + children: [ + {type: 'heading', depth: 1, children: [{type: 'text', value: 'GFM'}]}, + { + type: 'heading', + depth: 2, + children: [{type: 'text', value: 'Autolink literals'}] + }, + { + type: 'paragraph', + children: [ + { + type: 'link', + title: null, + url: 'http://www.example.com', + children: [{type: 'text', value: 'www.example.com'}] + }, + {type: 'text', value: ', '}, + { + type: 'link', + title: null, + url: 'https://example.com', + children: [{type: 'text', value: 'https://example.com'}] + }, + {type: 'text', value: ', and '}, + { + type: 'link', + title: null, + url: 'mailto:contact@example.com', + children: [{type: 'text', value: 'contact@example.com'}] + }, + {type: 'text', value: '.'} + ] + }, + { + type: 'heading', + depth: 2, + children: [{type: 'text', value: 'Strikethrough'}] + }, + { + type: 'paragraph', + children: [ + { + type: 'delete', + children: [{type: 'text', value: 'one'}] + }, + {type: 'text', value: ' or '}, + { + type: 'delete', + children: [{type: 'text', value: 'two'}] + }, + {type: 'text', value: ' tildes.'} + ] + }, + {type: 'heading', depth: 2, children: [{type: 'text', value: 'Table'}]}, + { + type: 'table', + align: [null, 'left', 'right', 'center'], + children: [ + { + type: 'tableRow', + children: [ + {type: 'tableCell', children: [{type: 'text', value: 'a'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'b'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'c'}]}, + {type: 'tableCell', children: [{type: 'text', value: 'd'}]} + ] + } + ] + }, + {type: 'heading', depth: 2, children: [{type: 'text', value: 'Tasklist'}]}, + { + type: 'list', + ordered: false, + start: null, + spread: false, + children: [ + { + type: 'listItem', + spread: false, + checked: false, + children: [ + {type: 'paragraph', children: [{type: 'text', value: 'to do'}]} + ] + }, + { + type: 'listItem', + spread: false, + checked: true, + children: [ + {type: 'paragraph', children: [{type: 'text', 
value: 'done'}]} + ] + } + ] + } + ] +} +``` + +```markdown +# GFM + +## Autolink literals + +[www.example.com](http://www.example.com), , and . + +## Strikethrough + +~~one~~ or ~~two~~ tildes. + +## Table + +| a | b | c | d | +| - | :- | -: | :-: | + +## Tasklist + +* [ ] to do +* [x] done +``` + +## API + +### `gfm.fromMarkdown` + +### `gfm.toMarkdown(options?)` + +> Note: the separate extensions are also available at +> `mdast-util-gfm/from-markdown` and +> `mdast-util-gfm/to-markdown`. + +Support GFM. +The exports of `fromMarkdown` is an extension for +[`mdast-util-from-markdown`][from-markdown]. +The export of `toMarkdown` is a function that can be called with options and +returns an extension for [`mdast-util-to-markdown`][to-markdown]. + +###### `options` + +Passed as `options` to [`mdast-util-gfm-table`][table]. + +The exports are extensions, respectively +for [`mdast-util-from-markdown`][from-markdown] and +[`mdast-util-to-markdown`][to-markdown]. + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`remarkjs/remark-gfm`][remark-gfm] + — remark plugin to support GFM +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`micromark/micromark-extension-gfm`][extension] + — micromark extension to parse GFM +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. 
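+
+As a small illustration of the `options` described above (the option names
+come from `mdast-util-gfm-table`), a sketch that turns off cell padding and
+pipe alignment might look like:
+
+```js
+var fromMarkdown = require('mdast-util-from-markdown')
+var toMarkdown = require('mdast-util-to-markdown')
+var syntax = require('micromark-extension-gfm')
+var gfm = require('mdast-util-gfm')
+
+var tree = fromMarkdown('| a | b |\n| - | - |\n| 1 | 2 |', {
+  extensions: [syntax()],
+  mdastExtensions: [gfm.fromMarkdown]
+})
+
+// Options given to `gfm.toMarkdown()` are forwarded to the table util.
+console.log(
+  toMarkdown(tree, {
+    extensions: [
+      gfm.toMarkdown({tableCellPadding: false, tablePipeAlign: false})
+    ]
+  })
+)
+```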
+ +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-gfm/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-gfm/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-gfm.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-gfm + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-gfm.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-gfm + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-gfm.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-gfm + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[remark]: https://github.com/remarkjs/remark + +[remark-gfm]: https://github.com/remarkjs/remark-gfm + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[micromark]: https://github.com/micromark/micromark + +[extension]: https://github.com/micromark/micromark-extension-gfm + +[table]: https://github.com/syntax-tree/mdast-util-gfm-table#options diff --git a/node_modules/mdast-util-gfm/to-markdown.js b/node_modules/mdast-util-gfm/to-markdown.js new file mode 100644 index 00000000..cf7b8357 --- /dev/null +++ b/node_modules/mdast-util-gfm/to-markdown.js @@ -0,0 +1,22 @@ +var autolinkLiteral = require('mdast-util-gfm-autolink-literal/to-markdown') +var strikethrough = require('mdast-util-gfm-strikethrough/to-markdown') +var table = require('mdast-util-gfm-table/to-markdown') +var taskListItem = require('mdast-util-gfm-task-list-item/to-markdown') +var configure = require('mdast-util-to-markdown/lib/configure') + +module.exports = toMarkdown + +function toMarkdown(options) { + var config = configure( + {handlers: {}, join: [], unsafe: [], options: {}}, + { + extensions: [autolinkLiteral, strikethrough, table(options), taskListItem] + } + ) + + return Object.assign(config.options, { + handlers: config.handlers, + join: config.join, + unsafe: config.unsafe + }) +} diff --git a/node_modules/mdast-util-to-markdown/index.js b/node_modules/mdast-util-to-markdown/index.js new file mode 100644 index 00000000..ce914ea0 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/index.js @@ -0,0 +1 @@ +module.exports = require('./lib') diff --git a/node_modules/mdast-util-to-markdown/lib/configure.js b/node_modules/mdast-util-to-markdown/lib/configure.js new file mode 100644 index 00000000..7c4e684f --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/configure.js @@ -0,0 +1,27 @@ +module.exports = configure + +function configure(base, extension) { + var index = -1 + var key + + // First do subextensions. 
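+  // (Fields of the top-level extension are applied after this recursion, so
+  // its `handlers` and options win over those of its subextensions, while
+  // `unsafe` and `join` are concatenated.)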
+ if (extension.extensions) { + while (++index < extension.extensions.length) { + configure(base, extension.extensions[index]) + } + } + + for (key in extension) { + if (key === 'extensions') { + // Empty. + } else if (key === 'unsafe' || key === 'join') { + base[key] = base[key].concat(extension[key] || []) + } else if (key === 'handlers') { + base[key] = Object.assign(base[key], extension[key] || {}) + } else { + base.options[key] = extension[key] + } + } + + return base +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/blockquote.js b/node_modules/mdast-util-to-markdown/lib/handle/blockquote.js new file mode 100644 index 00000000..d53d5796 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/blockquote.js @@ -0,0 +1,15 @@ +module.exports = blockquote + +var flow = require('../util/container-flow') +var indentLines = require('../util/indent-lines') + +function blockquote(node, _, context) { + var exit = context.enter('blockquote') + var value = indentLines(flow(node, context), map) + exit() + return value +} + +function map(line, index, blank) { + return '>' + (blank ? '' : ' ') + line +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/break.js b/node_modules/mdast-util-to-markdown/lib/handle/break.js new file mode 100644 index 00000000..dae2870c --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/break.js @@ -0,0 +1,20 @@ +module.exports = hardBreak + +var patternInScope = require('../util/pattern-in-scope') + +function hardBreak(node, _, context, safe) { + var index = -1 + + while (++index < context.unsafe.length) { + // If we can’t put eols in this construct (setext headings, tables), use a + // space instead. + if ( + context.unsafe[index].character === '\n' && + patternInScope(context.stack, context.unsafe[index]) + ) { + return /[ \t]/.test(safe.before) ? '' : ' ' + } + } + + return '\\\n' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/code.js b/node_modules/mdast-util-to-markdown/lib/handle/code.js new file mode 100644 index 00000000..dadeb7e4 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/code.js @@ -0,0 +1,64 @@ +module.exports = code + +var repeat = require('repeat-string') +var streak = require('longest-streak') +var formatCodeAsIndented = require('../util/format-code-as-indented') +var checkFence = require('../util/check-fence') +var indentLines = require('../util/indent-lines') +var safe = require('../util/safe') + +function code(node, _, context) { + var marker = checkFence(context) + var raw = node.value || '' + var suffix = marker === '`' ? 'GraveAccent' : 'Tilde' + var value + var sequence + var exit + var subexit + + if (formatCodeAsIndented(node, context)) { + exit = context.enter('codeIndented') + value = indentLines(raw, map) + } else { + sequence = repeat(marker, Math.max(streak(raw, marker) + 1, 3)) + exit = context.enter('codeFenced') + value = sequence + + if (node.lang) { + subexit = context.enter('codeFencedLang' + suffix) + value += safe(context, node.lang, { + before: '`', + after: ' ', + encode: ['`'] + }) + subexit() + } + + if (node.lang && node.meta) { + subexit = context.enter('codeFencedMeta' + suffix) + value += + ' ' + + safe(context, node.meta, { + before: ' ', + after: '\n', + encode: ['`'] + }) + subexit() + } + + value += '\n' + + if (raw) { + value += raw + '\n' + } + + value += sequence + } + + exit() + return value +} + +function map(line, _, blank) { + return (blank ? 
'' : ' ') + line +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/definition.js b/node_modules/mdast-util-to-markdown/lib/handle/definition.js new file mode 100644 index 00000000..eff2a296 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/definition.js @@ -0,0 +1,46 @@ +module.exports = definition + +var association = require('../util/association') +var checkQuote = require('../util/check-quote') +var safe = require('../util/safe') + +function definition(node, _, context) { + var marker = checkQuote(context) + var suffix = marker === '"' ? 'Quote' : 'Apostrophe' + var exit = context.enter('definition') + var subexit = context.enter('label') + var value = + '[' + safe(context, association(node), {before: '[', after: ']'}) + ']: ' + + subexit() + + if ( + // If there’s no url, or… + !node.url || + // If there’s whitespace, enclosed is prettier. + /[ \t\r\n]/.test(node.url) + ) { + subexit = context.enter('destinationLiteral') + value += '<' + safe(context, node.url, {before: '<', after: '>'}) + '>' + } else { + // No whitespace, raw is prettier. + subexit = context.enter('destinationRaw') + value += safe(context, node.url, {before: ' ', after: ' '}) + } + + subexit() + + if (node.title) { + subexit = context.enter('title' + suffix) + value += + ' ' + + marker + + safe(context, node.title, {before: marker, after: marker}) + + marker + subexit() + } + + exit() + + return value +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/emphasis.js b/node_modules/mdast-util-to-markdown/lib/handle/emphasis.js new file mode 100644 index 00000000..eb2b16a5 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/emphasis.js @@ -0,0 +1,21 @@ +module.exports = emphasis +emphasis.peek = emphasisPeek + +var checkEmphasis = require('../util/check-emphasis') +var phrasing = require('../util/container-phrasing') + +// To do: there are cases where emphasis cannot “form” depending on the +// previous or next character of sequences. +// There’s no way around that though, except for injecting zero-width stuff. +// Do we need to safeguard against that? +function emphasis(node, _, context) { + var marker = checkEmphasis(context) + var exit = context.enter('emphasis') + var value = phrasing(node, context, {before: marker, after: marker}) + exit() + return marker + value + marker +} + +function emphasisPeek(node, _, context) { + return context.options.emphasis || '*' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/heading.js b/node_modules/mdast-util-to-markdown/lib/handle/heading.js new file mode 100644 index 00000000..be89d3c7 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/heading.js @@ -0,0 +1,48 @@ +module.exports = heading + +var repeat = require('repeat-string') +var formatHeadingAsSetext = require('../util/format-heading-as-setext') +var phrasing = require('../util/container-phrasing') + +function heading(node, _, context) { + var rank = Math.max(Math.min(6, node.depth || 1), 1) + var exit + var subexit + var value + var sequence + + if (formatHeadingAsSetext(node, context)) { + exit = context.enter('headingSetext') + subexit = context.enter('phrasing') + value = phrasing(node, context, {before: '\n', after: '\n'}) + subexit() + exit() + + return ( + value + + '\n' + + repeat( + rank === 1 ? 
'=' : '-', + // The whole size… + value.length - + // Minus the position of the character after the last EOL (or + // 0 if there is none)… + (Math.max(value.lastIndexOf('\r'), value.lastIndexOf('\n')) + 1) + ) + ) + } + + sequence = repeat('#', rank) + exit = context.enter('headingAtx') + subexit = context.enter('phrasing') + value = phrasing(node, context, {before: '# ', after: '\n'}) + value = value ? sequence + ' ' + value : sequence + if (context.options.closeAtx) { + value += ' ' + sequence + } + + subexit() + exit() + + return value +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/html.js b/node_modules/mdast-util-to-markdown/lib/handle/html.js new file mode 100644 index 00000000..86493876 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/html.js @@ -0,0 +1,10 @@ +module.exports = html +html.peek = htmlPeek + +function html(node) { + return node.value || '' +} + +function htmlPeek() { + return '<' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/image-reference.js b/node_modules/mdast-util-to-markdown/lib/handle/image-reference.js new file mode 100644 index 00000000..0eb6dd29 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/image-reference.js @@ -0,0 +1,37 @@ +module.exports = imageReference +imageReference.peek = imageReferencePeek + +var association = require('../util/association') +var safe = require('../util/safe') + +function imageReference(node, _, context) { + var type = node.referenceType + var exit = context.enter('imageReference') + var subexit = context.enter('label') + var alt = safe(context, node.alt, {before: '[', after: ']'}) + var value = '![' + alt + ']' + var reference + var stack + + subexit() + // Hide the fact that we’re in phrasing, because escapes don’t work. + stack = context.stack + context.stack = [] + subexit = context.enter('reference') + reference = safe(context, association(node), {before: '[', after: ']'}) + subexit() + context.stack = stack + exit() + + if (type === 'full' || !alt || alt !== reference) { + value += '[' + reference + ']' + } else if (type !== 'shortcut') { + value += '[]' + } + + return value +} + +function imageReferencePeek() { + return '!' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/image.js b/node_modules/mdast-util-to-markdown/lib/handle/image.js new file mode 100644 index 00000000..52cc003a --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/image.js @@ -0,0 +1,53 @@ +module.exports = image +image.peek = imagePeek + +var checkQuote = require('../util/check-quote') +var safe = require('../util/safe') + +function image(node, _, context) { + var quote = checkQuote(context) + var suffix = quote === '"' ? 'Quote' : 'Apostrophe' + var exit = context.enter('image') + var subexit = context.enter('label') + var value = '![' + safe(context, node.alt, {before: '[', after: ']'}) + '](' + + subexit() + + if ( + // If there’s no url but there is a title… + (!node.url && node.title) || + // Or if there’s markdown whitespace or an eol, enclose. + /[ \t\r\n]/.test(node.url) + ) { + subexit = context.enter('destinationLiteral') + value += '<' + safe(context, node.url, {before: '<', after: '>'}) + '>' + } else { + // No whitespace, raw is prettier. + subexit = context.enter('destinationRaw') + value += safe(context, node.url, { + before: '(', + after: node.title ? 
' ' : ')' + }) + } + + subexit() + + if (node.title) { + subexit = context.enter('title' + suffix) + value += + ' ' + + quote + + safe(context, node.title, {before: quote, after: quote}) + + quote + subexit() + } + + value += ')' + exit() + + return value +} + +function imagePeek() { + return '!' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/index.js b/node_modules/mdast-util-to-markdown/lib/handle/index.js new file mode 100644 index 00000000..2908c930 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/index.js @@ -0,0 +1,20 @@ +exports.blockquote = require('./blockquote') +exports.break = require('./break') +exports.code = require('./code') +exports.definition = require('./definition') +exports.emphasis = require('./emphasis') +exports.hardBreak = require('./break') +exports.heading = require('./heading') +exports.html = require('./html') +exports.image = require('./image') +exports.imageReference = require('./image-reference') +exports.inlineCode = require('./inline-code') +exports.link = require('./link') +exports.linkReference = require('./link-reference') +exports.list = require('./list') +exports.listItem = require('./list-item') +exports.paragraph = require('./paragraph') +exports.root = require('./root') +exports.strong = require('./strong') +exports.text = require('./text') +exports.thematicBreak = require('./thematic-break') diff --git a/node_modules/mdast-util-to-markdown/lib/handle/inline-code.js b/node_modules/mdast-util-to-markdown/lib/handle/inline-code.js new file mode 100644 index 00000000..d443d89d --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/inline-code.js @@ -0,0 +1,69 @@ +module.exports = inlineCode +inlineCode.peek = inlineCodePeek + +var patternCompile = require('../util/pattern-compile') + +function inlineCode(node, parent, context) { + var value = node.value || '' + var sequence = '`' + var index = -1 + var pattern + var expression + var match + var position + + // If there is a single grave accent on its own in the code, use a fence of + // two. + // If there are two in a row, use one. + while (new RegExp('(^|[^`])' + sequence + '([^`]|$)').test(value)) { + sequence += '`' + } + + // If this is not just spaces or eols (tabs don’t count), and either the + // first or last character are a space, eol, or tick, then pad with spaces. + if ( + /[^ \r\n]/.test(value) && + (/[ \r\n`]/.test(value.charAt(0)) || + /[ \r\n`]/.test(value.charAt(value.length - 1))) + ) { + value = ' ' + value + ' ' + } + + // We have a potential problem: certain characters after eols could result in + // blocks being seen. + // For example, if someone injected the string `'\n# b'`, then that would + // result in an ATX heading. + // We can’t escape characters in `inlineCode`, but because eols are + // transformed to spaces when going from markdown to HTML anyway, we can swap + // them out. + while (++index < context.unsafe.length) { + pattern = context.unsafe[index] + + // Only look for `atBreak`s. + // Btw: note that `atBreak` patterns will always start the regex at LF or + // CR. + if (!pattern.atBreak) continue + + expression = patternCompile(pattern) + + while ((match = expression.exec(value))) { + position = match.index + + // Support CRLF (patterns only look for one of the characters). 
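+      // When the break is a CRLF pair the match points at the LF; stepping
+      // back one position lets the slice below replace both characters with
+      // the single space.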
+ if ( + value.charCodeAt(position) === 10 /* `\n` */ && + value.charCodeAt(position - 1) === 13 /* `\r` */ + ) { + position-- + } + + value = value.slice(0, position) + ' ' + value.slice(match.index + 1) + } + } + + return sequence + value + sequence +} + +function inlineCodePeek() { + return '`' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/link-reference.js b/node_modules/mdast-util-to-markdown/lib/handle/link-reference.js new file mode 100644 index 00000000..47825b2c --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/link-reference.js @@ -0,0 +1,38 @@ +module.exports = linkReference +linkReference.peek = linkReferencePeek + +var association = require('../util/association') +var phrasing = require('../util/container-phrasing') +var safe = require('../util/safe') + +function linkReference(node, _, context) { + var type = node.referenceType + var exit = context.enter('linkReference') + var subexit = context.enter('label') + var text = phrasing(node, context, {before: '[', after: ']'}) + var value = '[' + text + ']' + var reference + var stack + + subexit() + // Hide the fact that we’re in phrasing, because escapes don’t work. + stack = context.stack + context.stack = [] + subexit = context.enter('reference') + reference = safe(context, association(node), {before: '[', after: ']'}) + subexit() + context.stack = stack + exit() + + if (type === 'full' || !text || text !== reference) { + value += '[' + reference + ']' + } else if (type !== 'shortcut') { + value += '[]' + } + + return value +} + +function linkReferencePeek() { + return '[' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/link.js b/node_modules/mdast-util-to-markdown/lib/handle/link.js new file mode 100644 index 00000000..729dcaa9 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/link.js @@ -0,0 +1,70 @@ +module.exports = link +link.peek = linkPeek + +var checkQuote = require('../util/check-quote') +var formatLinkAsAutolink = require('../util/format-link-as-autolink') +var phrasing = require('../util/container-phrasing') +var safe = require('../util/safe') + +function link(node, _, context) { + var quote = checkQuote(context) + var suffix = quote === '"' ? 'Quote' : 'Apostrophe' + var exit + var subexit + var value + var stack + + if (formatLinkAsAutolink(node, context)) { + // Hide the fact that we’re in phrasing, because escapes don’t work. + stack = context.stack + context.stack = [] + exit = context.enter('autolink') + value = '<' + phrasing(node, context, {before: '<', after: '>'}) + '>' + exit() + context.stack = stack + return value + } + + exit = context.enter('link') + subexit = context.enter('label') + value = '[' + phrasing(node, context, {before: '[', after: ']'}) + '](' + subexit() + + if ( + // If there’s no url but there is a title… + (!node.url && node.title) || + // Or if there’s markdown whitespace or an eol, enclose. + /[ \t\r\n]/.test(node.url) + ) { + subexit = context.enter('destinationLiteral') + value += '<' + safe(context, node.url, {before: '<', after: '>'}) + '>' + } else { + // No whitespace, raw is prettier. + subexit = context.enter('destinationRaw') + value += safe(context, node.url, { + before: '(', + after: node.title ? 
' ' : ')' + }) + } + + subexit() + + if (node.title) { + subexit = context.enter('title' + suffix) + value += + ' ' + + quote + + safe(context, node.title, {before: quote, after: quote}) + + quote + subexit() + } + + value += ')' + + exit() + return value +} + +function linkPeek(node, _, context) { + return formatLinkAsAutolink(node, context) ? '<' : '[' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/list-item.js b/node_modules/mdast-util-to-markdown/lib/handle/list-item.js new file mode 100644 index 00000000..8124375a --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/list-item.js @@ -0,0 +1,47 @@ +module.exports = listItem + +var repeat = require('repeat-string') +var checkBullet = require('../util/check-bullet') +var checkListItemIndent = require('../util/check-list-item-indent') +var flow = require('../util/container-flow') +var indentLines = require('../util/indent-lines') + +function listItem(node, parent, context) { + var bullet = checkBullet(context) + var listItemIndent = checkListItemIndent(context) + var size + var value + var exit + + if (parent && parent.ordered) { + bullet = + (parent.start > -1 ? parent.start : 1) + + (context.options.incrementListMarker === false + ? 0 + : parent.children.indexOf(node)) + + '.' + } + + size = bullet.length + 1 + + if ( + listItemIndent === 'tab' || + (listItemIndent === 'mixed' && ((parent && parent.spread) || node.spread)) + ) { + size = Math.ceil(size / 4) * 4 + } + + exit = context.enter('listItem') + value = indentLines(flow(node, context), map) + exit() + + return value + + function map(line, index, blank) { + if (index) { + return (blank ? '' : repeat(' ', size)) + line + } + + return (blank ? bullet : bullet + repeat(' ', size - bullet.length)) + line + } +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/list.js b/node_modules/mdast-util-to-markdown/lib/handle/list.js new file mode 100644 index 00000000..7a0803a3 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/list.js @@ -0,0 +1,10 @@ +module.exports = list + +var flow = require('../util/container-flow') + +function list(node, _, context) { + var exit = context.enter('list') + var value = flow(node, context) + exit() + return value +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/paragraph.js b/node_modules/mdast-util-to-markdown/lib/handle/paragraph.js new file mode 100644 index 00000000..84b241ed --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/paragraph.js @@ -0,0 +1,12 @@ +module.exports = paragraph + +var phrasing = require('../util/container-phrasing') + +function paragraph(node, _, context) { + var exit = context.enter('paragraph') + var subexit = context.enter('phrasing') + var value = phrasing(node, context, {before: '\n', after: '\n'}) + subexit() + exit() + return value +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/root.js b/node_modules/mdast-util-to-markdown/lib/handle/root.js new file mode 100644 index 00000000..2a5fb9f2 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/root.js @@ -0,0 +1,7 @@ +module.exports = root + +var flow = require('../util/container-flow') + +function root(node, _, context) { + return flow(node, context) +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/strong.js b/node_modules/mdast-util-to-markdown/lib/handle/strong.js new file mode 100644 index 00000000..6a77ce10 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/strong.js @@ -0,0 +1,21 @@ +module.exports = strong +strong.peek = 
strongPeek + +var checkStrong = require('../util/check-strong') +var phrasing = require('../util/container-phrasing') + +// To do: there are cases where emphasis cannot “form” depending on the +// previous or next character of sequences. +// There’s no way around that though, except for injecting zero-width stuff. +// Do we need to safeguard against that? +function strong(node, _, context) { + var marker = checkStrong(context) + var exit = context.enter('strong') + var value = phrasing(node, context, {before: marker, after: marker}) + exit() + return marker + marker + value + marker + marker +} + +function strongPeek(node, _, context) { + return context.options.strong || '*' +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/text.js b/node_modules/mdast-util-to-markdown/lib/handle/text.js new file mode 100644 index 00000000..5145cc91 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/text.js @@ -0,0 +1,7 @@ +module.exports = text + +var safe = require('../util/safe') + +function text(node, parent, context, safeOptions) { + return safe(context, node.value, safeOptions) +} diff --git a/node_modules/mdast-util-to-markdown/lib/handle/thematic-break.js b/node_modules/mdast-util-to-markdown/lib/handle/thematic-break.js new file mode 100644 index 00000000..aec8bbdd --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/handle/thematic-break.js @@ -0,0 +1,14 @@ +module.exports = thematicBreak + +var repeat = require('repeat-string') +var checkRepeat = require('../util/check-rule-repeat') +var checkRule = require('../util/check-rule') + +function thematicBreak(node, parent, context) { + var value = repeat( + checkRule(context) + (context.options.ruleSpaces ? ' ' : ''), + checkRepeat(context) + ) + + return context.options.ruleSpaces ? value.slice(0, -1) : value +} diff --git a/node_modules/mdast-util-to-markdown/lib/index.js b/node_modules/mdast-util-to-markdown/lib/index.js new file mode 100644 index 00000000..1803d3c8 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/index.js @@ -0,0 +1,73 @@ +module.exports = toMarkdown + +var zwitch = require('zwitch') +var configure = require('./configure') +var defaultHandlers = require('./handle') +var defaultJoin = require('./join') +var defaultUnsafe = require('./unsafe') + +function toMarkdown(tree, options) { + var settings = options || {} + var context = { + enter: enter, + stack: [], + unsafe: [], + join: [], + handlers: {}, + options: {} + } + var result + + configure(context, { + unsafe: defaultUnsafe, + join: defaultJoin, + handlers: defaultHandlers + }) + configure(context, settings) + + if (context.options.tightDefinitions) { + context.join = [joinDefinition].concat(context.join) + } + + context.handle = zwitch('type', { + invalid: invalid, + unknown: unknown, + handlers: context.handlers + }) + + result = context.handle(tree, null, context, {before: '\n', after: '\n'}) + + if ( + result && + result.charCodeAt(result.length - 1) !== 10 && + result.charCodeAt(result.length - 1) !== 13 + ) { + result += '\n' + } + + return result + + function enter(name) { + context.stack.push(name) + return exit + + function exit() { + context.stack.pop() + } + } +} + +function invalid(value) { + throw new Error('Cannot handle value `' + value + '`, expected node') +} + +function unknown(node) { + throw new Error('Cannot handle unknown node `' + node.type + '`') +} + +function joinDefinition(left, right) { + // No blank line between adjacent definitions. 
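+  // (A numeric result from a join function is the number of blank lines to
+  // put between the two nodes, so `0` keeps adjacent definitions tight.)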
+ if (left.type === 'definition' && left.type === right.type) { + return 0 + } +} diff --git a/node_modules/mdast-util-to-markdown/lib/join.js b/node_modules/mdast-util-to-markdown/lib/join.js new file mode 100644 index 00000000..387af2f5 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/join.js @@ -0,0 +1,37 @@ +module.exports = [joinDefaults] + +var formatCodeAsIndented = require('./util/format-code-as-indented') +var formatHeadingAsSetext = require('./util/format-heading-as-setext') + +function joinDefaults(left, right, parent, context) { + if ( + // Two lists with the same marker. + (right.type === 'list' && + right.type === left.type && + Boolean(left.ordered) === Boolean(right.ordered)) || + // Indented code after list or another indented code. + (right.type === 'code' && + formatCodeAsIndented(right, context) && + (left.type === 'list' || + (left.type === right.type && formatCodeAsIndented(left, context)))) + ) { + return false + } + + // Join children of a list or an item. + // In which case, `parent` has a `spread` field. + if (typeof parent.spread === 'boolean') { + if ( + left.type === 'paragraph' && + // Two paragraphs. + (left.type === right.type || + right.type === 'definition' || + // Paragraph followed by a setext heading. + (right.type === 'heading' && formatHeadingAsSetext(right, context))) + ) { + return + } + + return parent.spread ? 1 : 0 + } +} diff --git a/node_modules/mdast-util-to-markdown/lib/unsafe.js b/node_modules/mdast-util-to-markdown/lib/unsafe.js new file mode 100644 index 00000000..7b90450b --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/unsafe.js @@ -0,0 +1,110 @@ +module.exports = [ + { + character: '\t', + inConstruct: ['codeFencedLangGraveAccent', 'codeFencedLangTilde'] + }, + { + character: '\r', + inConstruct: [ + 'codeFencedLangGraveAccent', + 'codeFencedLangTilde', + 'codeFencedMetaGraveAccent', + 'codeFencedMetaTilde', + 'destinationLiteral', + 'headingAtx' + ] + }, + { + character: '\n', + inConstruct: [ + 'codeFencedLangGraveAccent', + 'codeFencedLangTilde', + 'codeFencedMetaGraveAccent', + 'codeFencedMetaTilde', + 'destinationLiteral', + 'headingAtx' + ] + }, + { + character: ' ', + inConstruct: ['codeFencedLangGraveAccent', 'codeFencedLangTilde'] + }, + // An exclamation mark can start an image, if it is followed by a link or + // a link reference. + {character: '!', after: '\\[', inConstruct: 'phrasing'}, + // A quote can break out of a title. + {character: '"', inConstruct: 'titleQuote'}, + // A number sign could start an ATX heading if it starts a line. + {atBreak: true, character: '#'}, + {character: '#', inConstruct: 'headingAtx', after: '(?:[\r\n]|$)'}, + // Dollar sign and percentage are not used in markdown. + // An ampersand could start a character reference. + {character: '&', after: '[#A-Za-z]', inConstruct: 'phrasing'}, + // An apostrophe can break out of a title. + {character: "'", inConstruct: 'titleApostrophe'}, + // A left paren could break out of a destination raw. + {character: '(', inConstruct: 'destinationRaw'}, + {before: '\\]', character: '(', inConstruct: 'phrasing'}, + // A right paren could start a list item or break out of a destination + // raw. + {atBreak: true, before: '\\d+', character: ')'}, + {character: ')', inConstruct: 'destinationRaw'}, + // An asterisk can start thematic breaks, list items, emphasis, strong. + {atBreak: true, character: '*'}, + {character: '*', inConstruct: 'phrasing'}, + // A plus sign could start a list item. 
+ {atBreak: true, character: '+'}, + // A dash can start thematic breaks, list items, and setext heading + // underlines. + {atBreak: true, character: '-'}, + // A dot could start a list item. + {atBreak: true, before: '\\d+', character: '.', after: '(?:[ \t\r\n]|$)'}, + // Slash, colon, and semicolon are not used in markdown for constructs. + // A less than can start html (flow or text) or an autolink. + // HTML could start with an exclamation mark (declaration, cdata, comment), + // slash (closing tag), question mark (instruction), or a letter (tag). + // An autolink also starts with a letter. + // Finally, it could break out of a destination literal. + {atBreak: true, character: '<', after: '[!/?A-Za-z]'}, + {character: '<', after: '[!/?A-Za-z]', inConstruct: 'phrasing'}, + {character: '<', inConstruct: 'destinationLiteral'}, + // An equals to can start setext heading underlines. + {atBreak: true, character: '='}, + // A greater than can start block quotes and it can break out of a + // destination literal. + {atBreak: true, character: '>'}, + {character: '>', inConstruct: 'destinationLiteral'}, + // Question mark and at sign are not used in markdown for constructs. + // A left bracket can start definitions, references, labels, + {atBreak: true, character: '['}, + {character: '[', inConstruct: ['phrasing', 'label', 'reference']}, + // A backslash can start an escape (when followed by punctuation) or a + // hard break (when followed by an eol). + // Note: typical escapes are handled in `safe`! + {character: '\\', after: '[\\r\\n]', inConstruct: 'phrasing'}, + // A right bracket can exit labels. + { + character: ']', + inConstruct: ['label', 'reference'] + }, + // Caret is not used in markdown for constructs. + // An underscore can start emphasis, strong, or a thematic break. + {atBreak: true, character: '_'}, + {before: '[^A-Za-z]', character: '_', inConstruct: 'phrasing'}, + {character: '_', after: '[^A-Za-z]', inConstruct: 'phrasing'}, + // A grave accent can start code (fenced or text), or it can break out of + // a grave accent code fence. + {atBreak: true, character: '`'}, + { + character: '`', + inConstruct: [ + 'codeFencedLangGraveAccent', + 'codeFencedMetaGraveAccent', + 'phrasing' + ] + }, + // Left brace, vertical bar, right brace are not used in markdown for + // constructs. + // A tilde can start code (fenced). + {atBreak: true, character: '~'} +] diff --git a/node_modules/mdast-util-to-markdown/lib/util/association.js b/node_modules/mdast-util-to-markdown/lib/util/association.js new file mode 100644 index 00000000..7352ca6f --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/association.js @@ -0,0 +1,30 @@ +module.exports = association + +var decode = require('parse-entities/decode-entity') + +var characterEscape = /\\([!-/:-@[-`{-~])/g +var characterReference = /&(#(\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi + +// The `label` of an association is the string value: character escapes and +// references work, and casing is intact. +// The `identifier` is used to match one association to another: controversially, +// character escapes and references don’t work in this matching: `©` does +// not match `©`, and `\+` does not match `+`. +// But casing is ignored (and whitespace) is trimmed and collapsed: ` A\nb` +// matches `a b`. +// So, we do prefer the label when figuring out how we’re going to serialize: +// it has whitespace, casing, and we can ignore most useless character escapes +// and all character references. 
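+// (For example, a definition written with the label ` A\nb` keeps that label
+// when serialized, even though its identifier normalizes to `a b`.)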
+function association(node) { + if (node.label || !node.identifier) { + return node.label || '' + } + + return node.identifier + .replace(characterEscape, '$1') + .replace(characterReference, decodeIfPossible) +} + +function decodeIfPossible($0, $1) { + return decode($1) || $0 +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-bullet.js b/node_modules/mdast-util-to-markdown/lib/util/check-bullet.js new file mode 100644 index 00000000..66621e84 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-bullet.js @@ -0,0 +1,15 @@ +module.exports = checkBullet + +function checkBullet(context) { + var marker = context.options.bullet || '*' + + if (marker !== '*' && marker !== '+' && marker !== '-') { + throw new Error( + 'Cannot serialize items with `' + + marker + + '` for `options.bullet`, expected `*`, `+`, or `-`' + ) + } + + return marker +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-emphasis.js b/node_modules/mdast-util-to-markdown/lib/util/check-emphasis.js new file mode 100644 index 00000000..2a5e0e30 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-emphasis.js @@ -0,0 +1,15 @@ +module.exports = checkEmphasis + +function checkEmphasis(context) { + var marker = context.options.emphasis || '*' + + if (marker !== '*' && marker !== '_') { + throw new Error( + 'Cannot serialize emphasis with `' + + marker + + '` for `options.emphasis`, expected `*`, or `_`' + ) + } + + return marker +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-fence.js b/node_modules/mdast-util-to-markdown/lib/util/check-fence.js new file mode 100644 index 00000000..e4970ac2 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-fence.js @@ -0,0 +1,15 @@ +module.exports = checkFence + +function checkFence(context) { + var marker = context.options.fence || '`' + + if (marker !== '`' && marker !== '~') { + throw new Error( + 'Cannot serialize code with `' + + marker + + '` for `options.fence`, expected `` ` `` or `~`' + ) + } + + return marker +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js b/node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js new file mode 100644 index 00000000..4664a47b --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js @@ -0,0 +1,19 @@ +module.exports = checkListItemIndent + +function checkListItemIndent(context) { + var style = context.options.listItemIndent || 'tab' + + if (style === 1 || style === '1') { + return 'one' + } + + if (style !== 'tab' && style !== 'one' && style !== 'mixed') { + throw new Error( + 'Cannot serialize items with `' + + style + + '` for `options.listItemIndent`, expected `tab`, `one`, or `mixed`' + ) + } + + return style +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-quote.js b/node_modules/mdast-util-to-markdown/lib/util/check-quote.js new file mode 100644 index 00000000..4a618a5e --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-quote.js @@ -0,0 +1,15 @@ +module.exports = checkQuote + +function checkQuote(context) { + var marker = context.options.quote || '"' + + if (marker !== '"' && marker !== "'") { + throw new Error( + 'Cannot serialize title with `' + + marker + + '` for `options.quote`, expected `"`, or `\'`' + ) + } + + return marker +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-rule-repeat.js b/node_modules/mdast-util-to-markdown/lib/util/check-rule-repeat.js new file mode 100644 index 00000000..71ae53e0 
--- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-rule-repeat.js @@ -0,0 +1,15 @@ +module.exports = checkRule + +function checkRule(context) { + var repetition = context.options.ruleRepetition || 3 + + if (repetition < 3) { + throw new Error( + 'Cannot serialize rules with repetition `' + + repetition + + '` for `options.ruleRepetition`, expected `3` or more' + ) + } + + return repetition +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-rule.js b/node_modules/mdast-util-to-markdown/lib/util/check-rule.js new file mode 100644 index 00000000..48064efd --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-rule.js @@ -0,0 +1,15 @@ +module.exports = checkRule + +function checkRule(context) { + var marker = context.options.rule || '*' + + if (marker !== '*' && marker !== '-' && marker !== '_') { + throw new Error( + 'Cannot serialize rules with `' + + marker + + '` for `options.rule`, expected `*`, `-`, or `_`' + ) + } + + return marker +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/check-strong.js b/node_modules/mdast-util-to-markdown/lib/util/check-strong.js new file mode 100644 index 00000000..e2c3f075 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/check-strong.js @@ -0,0 +1,15 @@ +module.exports = checkStrong + +function checkStrong(context) { + var marker = context.options.strong || '*' + + if (marker !== '*' && marker !== '_') { + throw new Error( + 'Cannot serialize strong with `' + + marker + + '` for `options.strong`, expected `*`, or `_`' + ) + } + + return marker +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/container-flow.js b/node_modules/mdast-util-to-markdown/lib/util/container-flow.js new file mode 100644 index 00000000..d894e1ca --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/container-flow.js @@ -0,0 +1,47 @@ +module.exports = flow + +var repeat = require('repeat-string') + +function flow(parent, context) { + var children = parent.children || [] + var results = [] + var index = -1 + var child + + while (++index < children.length) { + child = children[index] + + results.push( + context.handle(child, parent, context, {before: '\n', after: '\n'}) + ) + + if (index + 1 < children.length) { + results.push(between(child, children[index + 1])) + } + } + + return results.join('') + + function between(left, right) { + var index = -1 + var result + + while (++index < context.join.length) { + result = context.join[index](left, right, parent, context) + + if (result === true || result === 1) { + break + } + + if (typeof result === 'number') { + return repeat('\n', 1 + Number(result)) + } + + if (result === false) { + return '\n\n\n\n' + } + } + + return '\n\n' + } +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js b/node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js new file mode 100644 index 00000000..5a9c45d6 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js @@ -0,0 +1,57 @@ +module.exports = phrasing + +function phrasing(parent, context, safeOptions) { + var children = parent.children || [] + var results = [] + var index = -1 + var before = safeOptions.before + var after + var handle + var child + + while (++index < children.length) { + child = children[index] + + if (index + 1 < children.length) { + handle = context.handle.handlers[children[index + 1].type] + if (handle && handle.peek) handle = handle.peek + after = handle + ? 
handle(children[index + 1], parent, context, { + before: '', + after: '' + }).charAt(0) + : '' + } else { + after = safeOptions.after + } + + // In some cases, html (text) can be found in phrasing right after an eol. + // When we’d serialize that, in most cases that would be seen as html + // (flow). + // As we can’t escape or so to prevent it from happening, we take a somewhat + // reasonable approach: replace that eol with a space. + // See: + if ( + results.length > 0 && + (before === '\r' || before === '\n') && + child.type === 'html' + ) { + results[results.length - 1] = results[results.length - 1].replace( + /(\r?\n|\r)$/, + ' ' + ) + before = ' ' + } + + results.push( + context.handle(child, parent, context, { + before: before, + after: after + }) + ) + + before = results[results.length - 1].slice(-1) + } + + return results.join('') +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/format-code-as-indented.js b/node_modules/mdast-util-to-markdown/lib/util/format-code-as-indented.js new file mode 100644 index 00000000..127353be --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/format-code-as-indented.js @@ -0,0 +1,14 @@ +module.exports = formatCodeAsIndented + +function formatCodeAsIndented(node, context) { + return ( + !context.options.fences && + node.value && + // If there’s no info… + !node.lang && + // And there’s a non-whitespace character… + /[^ \r\n]/.test(node.value) && + // And the value doesn’t start or end in a blank… + !/^[\t ]*(?:[\r\n]|$)|(?:^|[\r\n])[\t ]*$/.test(node.value) + ) +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/format-heading-as-setext.js b/node_modules/mdast-util-to-markdown/lib/util/format-heading-as-setext.js new file mode 100644 index 00000000..3caf43d4 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/format-heading-as-setext.js @@ -0,0 +1,9 @@ +module.exports = formatHeadingAsSetext + +var toString = require('mdast-util-to-string') + +function formatHeadingAsSetext(node, context) { + return ( + context.options.setext && (!node.depth || node.depth < 3) && toString(node) + ) +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/format-link-as-autolink.js b/node_modules/mdast-util-to-markdown/lib/util/format-link-as-autolink.js new file mode 100644 index 00000000..1354f35e --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/format-link-as-autolink.js @@ -0,0 +1,26 @@ +module.exports = formatLinkAsAutolink + +var toString = require('mdast-util-to-string') + +function formatLinkAsAutolink(node, context) { + var raw = toString(node) + + return ( + !context.options.resourceLink && + // If there’s a url… + node.url && + // And there’s a no title… + !node.title && + // And the content of `node` is a single text node… + node.children && + node.children.length === 1 && + node.children[0].type === 'text' && + // And if the url is the same as the content… + (raw === node.url || 'mailto:' + raw === node.url) && + // And that starts w/ a protocol… + /^[a-z][a-z+.-]+:/i.test(node.url) && + // And that doesn’t contain ASCII control codes (character escapes and + // references don’t work) or angle brackets… + !/[\0- <>\u007F]/.test(node.url) + ) +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/indent-lines.js b/node_modules/mdast-util-to-markdown/lib/util/indent-lines.js new file mode 100644 index 00000000..02b4dd83 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/indent-lines.js @@ -0,0 +1,25 @@ +module.exports = indentLines + +var eol = /\r?\n|\r/g + +function 
indentLines(value, map) { + var result = [] + var start = 0 + var line = 0 + var match + + while ((match = eol.exec(value))) { + one(value.slice(start, match.index)) + result.push(match[0]) + start = match.index + match[0].length + line++ + } + + one(value.slice(start)) + + return result.join('') + + function one(value) { + result.push(map(value, line, !value)) + } +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js b/node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js new file mode 100644 index 00000000..2c61e4ee --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js @@ -0,0 +1,25 @@ +module.exports = patternCompile + +function patternCompile(pattern) { + var before + var after + + if (!pattern._compiled) { + before = pattern.before ? '(?:' + pattern.before + ')' : '' + after = pattern.after ? '(?:' + pattern.after + ')' : '' + + if (pattern.atBreak) { + before = '[\\r\\n][\\t ]*' + before + } + + pattern._compiled = new RegExp( + (before ? '(' + before + ')' : '') + + (/[|\\{}()[\]^$+*?.-]/.test(pattern.character) ? '\\' : '') + + pattern.character + + (after || ''), + 'g' + ) + } + + return pattern._compiled +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js b/node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js new file mode 100644 index 00000000..3d0049cc --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js @@ -0,0 +1,30 @@ +module.exports = patternInScope + +function patternInScope(stack, pattern) { + return ( + listInScope(stack, pattern.inConstruct, true) && + !listInScope(stack, pattern.notInConstruct) + ) +} + +function listInScope(stack, list, none) { + var index + + if (!list) { + return none + } + + if (typeof list === 'string') { + list = [list] + } + + index = -1 + + while (++index < list.length) { + if (stack.indexOf(list[index]) !== -1) { + return true + } + } + + return false +} diff --git a/node_modules/mdast-util-to-markdown/lib/util/safe.js b/node_modules/mdast-util-to-markdown/lib/util/safe.js new file mode 100644 index 00000000..e6b80857 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/lib/util/safe.js @@ -0,0 +1,139 @@ +module.exports = safe + +var patternCompile = require('./pattern-compile') +var patternInScope = require('./pattern-in-scope') + +function safe(context, input, config) { + var value = (config.before || '') + (input || '') + (config.after || '') + var positions = [] + var result = [] + var infos = {} + var index = -1 + var before + var after + var position + var pattern + var expression + var match + var start + var end + + while (++index < context.unsafe.length) { + pattern = context.unsafe[index] + + if (!patternInScope(context.stack, pattern)) { + continue + } + + expression = patternCompile(pattern) + + while ((match = expression.exec(value))) { + before = 'before' in pattern || pattern.atBreak + after = 'after' in pattern + + position = match.index + (before ? match[1].length : 0) + + if (positions.indexOf(position) === -1) { + positions.push(position) + infos[position] = {before: before, after: after} + } else { + if (infos[position].before && !before) { + infos[position].before = false + } + + if (infos[position].after && !after) { + infos[position].after = false + } + } + } + } + + positions.sort(numerical) + + start = config.before ? config.before.length : 0 + end = value.length - (config.after ? 
config.after.length : 0) + index = -1 + + while (++index < positions.length) { + position = positions[index] + + if ( + // Character before or after matched: + position < start || + position >= end + ) { + continue + } + + // If this character is supposed to be escaped because it has a condition on + // the next character, and the next character is definitly being escaped, + // then skip this escape. + if ( + position + 1 < end && + positions[index + 1] === position + 1 && + infos[position].after && + !infos[position + 1].before && + !infos[position + 1].after + ) { + continue + } + + if (start !== position) { + // If we have to use a character reference, an ampersand would be more + // correct, but as backslashes only care about punctuation, either will + // do the trick + result.push(escapeBackslashes(value.slice(start, position), '\\')) + } + + start = position + + if ( + /[!-/:-@[-`{-~]/.test(value.charAt(position)) && + (!config.encode || config.encode.indexOf(value.charAt(position)) === -1) + ) { + // Character escape. + result.push('\\') + } else { + // Character reference. + result.push( + '&#x' + value.charCodeAt(position).toString(16).toUpperCase() + ';' + ) + start++ + } + } + + result.push(escapeBackslashes(value.slice(start, end), config.after)) + + return result.join('') +} + +function numerical(a, b) { + return a - b +} + +function escapeBackslashes(value, after) { + var expression = /\\(?=[!-/:-@[-`{-~])/g + var positions = [] + var results = [] + var index = -1 + var start = 0 + var whole = value + after + var match + + while ((match = expression.exec(whole))) { + positions.push(match.index) + } + + while (++index < positions.length) { + if (start !== positions[index]) { + results.push(value.slice(start, positions[index])) + } + + results.push('\\') + start = positions[index] + } + + results.push(value.slice(start)) + + return results.join('') +} diff --git a/node_modules/mdast-util-to-markdown/license b/node_modules/mdast-util-to-markdown/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
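The `unsafe` patterns in `lib/unsafe.js` and the `safe()` helper above are what the public API uses to escape characters that would otherwise change meaning when the output is reparsed. A small sketch of the observable effect (illustrative input, assuming the package is installed):

```js
var toMarkdown = require('mdast-util-to-markdown')

// `#` at a line break and `*` in phrasing match `unsafe` patterns, so `safe()`
// backslash-escapes them and the text survives a markdown round trip.
console.log(
  toMarkdown({
    type: 'paragraph',
    children: [{type: 'text', value: '# not a heading, *not emphasis*'}]
  })
)
// Prints roughly: \# not a heading, \*not emphasis\*
```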
diff --git a/node_modules/mdast-util-to-markdown/package.json b/node_modules/mdast-util-to-markdown/package.json new file mode 100644 index 00000000..90cb99e9 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/package.json @@ -0,0 +1,96 @@ +{ + "name": "mdast-util-to-markdown", + "version": "0.6.5", + "description": "mdast utility to serialize markdown", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "markup", + "serialize", + "stringify", + "compile", + "syntax", + "tree", + "ast" + ], + "repository": "syntax-tree/mdast-util-to-markdown", + "bugs": "https://github.com/syntax-tree/mdast-util-to-markdown/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "index.js", + "lib/", + "types/index.d.ts" + ], + "types": "types", + "dependencies": { + "@types/unist": "^2.0.0", + "longest-streak": "^2.0.0", + "mdast-util-to-string": "^2.0.0", + "parse-entities": "^2.0.0", + "repeat-string": "^1.0.0", + "zwitch": "^1.0.0" + }, + "devDependencies": { + "browserify": "^17.0.0", + "dtslint": "^4.0.0", + "mdast-util-from-markdown": "^0.8.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "unist-util-remove-position": "^3.0.0", + "xo": "^0.37.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "build": "browserify . -s mdastUtilToMarkdown -p tinyify > mdast-util-to-markdown.min.js", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "complexity": "off", + "unicorn/prefer-includes": "off" + }, + "ignores": [ + "types/" + ] + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-to-markdown/readme.md b/node_modules/mdast-util-to-markdown/readme.md new file mode 100644 index 00000000..d0cc615b --- /dev/null +++ b/node_modules/mdast-util-to-markdown/readme.md @@ -0,0 +1,312 @@ +# mdast-util-to-markdown + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[mdast][]** utility to parse markdown. 
+ +## Install + +[npm][]: + +```sh +npm install mdast-util-to-markdown +``` + +## Use + +Say we have the following script, `example.js`: + +```js +var toMarkdown = require('mdast-util-to-markdown') + +var tree = { + type: 'root', + children: [ + { + type: 'blockquote', + children: [ + {type: 'thematicBreak'}, + { + type: 'paragraph', + children: [ + {type: 'text', value: '- a\nb !'}, + { + type: 'link', + url: 'example.com', + children: [{type: 'text', value: 'd'}] + } + ] + } + ] + } + ] +} + +console.log(toMarkdown(tree)) +``` + +Now, running `node example` yields (note the properly escaped characters which +would otherwise turn into a list and image respectively): + +```markdown +> *** +> +> \- a +> b \![d](example.com) +``` + +## API + +### `toMarkdown(tree[, options])` + +Serialize **[mdast][]** to markdown. + +##### Formatting options + +###### `options.bullet` + +Marker to use to for bullets of items in unordered lists (`'*'`, `'+'`, or `'-'`, +default: `'*'`). + +###### `options.closeAtx` + +Whether to add the same number of number signs (`#`) at the end of an ATX +heading as the opening sequence (`boolean`, default: `false`). + +###### `options.emphasis` + +Marker to use to serialize emphasis (`'*'` or `'_'`, default: `'*'`). + +###### `options.fence` + +Marker to use to serialize fenced code (``'`'`` or `'~'`, default: ``'`'``). + +###### `options.fences` + +Whether to use fenced code always (`boolean`, default: `false`). +The default is to fenced code if there is a language defined, if the code is +empty, or if it starts or ends in empty lines. + +###### `options.incrementListMarker` + +Whether to increment the value of bullets of items in ordered lists (`boolean`, +default: `true`). + +###### `options.listItemIndent` + +Whether to indent the content of list items with the size of the bullet plus one +space (when `'one'`) or a tab stop (`'tab'`), or depending on the item and its +parent list (`'mixed'`, uses `'one'` if the item and list are tight and `'tab'` +otherwise) (`'one'`, `'tab'`, or `'mixed'`, default: `'tab'`). + +###### `options.quote` + +Marker to use to serialize titles (`'"'` or `"'"`, default: `'"'`). + +###### `options.resourceLink` + +Whether to use reference links always (`boolean`, default: `false`). +The default is to use autolinks (``) when possible. + +###### `options.rule` + +Marker to use for thematic breaks (`'*'`, `'-'`, or `'_'`, default: `'*'`). + +###### `options.ruleRepetition` + +Number of markers to use for thematic breaks (`number`, default: +`3`, min: `3`). + +###### `options.ruleSpaces` + +Whether to add spaces between markers in thematic breaks (`boolean`, default: +`false`). + +###### `options.setext` + +Whether to use setext headings when possible (`boolean`, default: `false`). +Setext headings are not possible for headings with a rank more than 2 or when +they’re empty. + +###### `options.strong` + +Marker to use to serialize strong (`'*'` or `'_'`, default: `'*'`). + +###### `options.tightDefinitions` + +Whether to join definitions w/o a blank line (`boolean`, default: `false`). +Shortcut for a join function like so: + +```js +function (left, right) { + if (left.type === 'definition' && right.type === 'definition') { + return 0 + } +} +``` + +###### `options.handlers` + +Object mapping node types to custom handlers. +Useful for syntax extensions. +Take a look at [`lib/handle`][handlers] for examples. + +###### `options.join` + +List of functions used to determine what to place between two flow nodes. 
+Often, they are joined by one blank line. +In certain cases, it’s nicer to have them next to each other. +Or, they can’t occur together. +These functions receive two adjacent nodes and their parent and can return +`number` or `boolean`, referring to how many blank lines to use between them. +A return value of `true` is as passing `1`. +A return value of `false` means the nodes cannot be joined by a blank line, such +as two adjacent block quotes or indented code after a list, in which case a +comment will be injected to break them up: + +```markdown +> Quote 1 + + + +> Quote 2 +``` + +###### `options.unsafe` + +List of patterns to escape. +Useful for syntax extensions. +Take a look at [`lib/unsafe.js`][unsafe] for examples. + +##### Extension options + +###### `options.extensions` + +List of extensions (`Array.`). +Each `ToMarkdownExtension` is an object with the same interface as `options` +here. + +##### Returns + +`string` — Serialized markdown. + +## List of extensions + +* [`syntax-tree/mdast-util-directive`](https://github.com/syntax-tree/mdast-util-directive) + — serialize directives +* [`syntax-tree/mdast-util-footnote`](https://github.com/syntax-tree/mdast-util-footnote) + — serialize footnotes +* [`syntax-tree/mdast-util-frontmatter`](https://github.com/syntax-tree/mdast-util-frontmatter) + — serialize frontmatter (YAML, TOML, more) +* [`syntax-tree/mdast-util-gfm`](https://github.com/syntax-tree/mdast-util-gfm) + — serialize GFM +* [`syntax-tree/mdast-util-gfm-autolink-literal`](https://github.com/syntax-tree/mdast-util-gfm-autolink-literal) + — serialize GFM autolink literals +* [`syntax-tree/mdast-util-gfm-strikethrough`](https://github.com/syntax-tree/mdast-util-gfm-strikethrough) + — serialize GFM strikethrough +* [`syntax-tree/mdast-util-gfm-table`](https://github.com/syntax-tree/mdast-util-gfm-table) + — serialize GFM tables +* [`syntax-tree/mdast-util-gfm-task-list-item`](https://github.com/syntax-tree/mdast-util-gfm-task-list-item) + — serialize GFM task list items +* [`syntax-tree/mdast-util-math`](https://github.com/syntax-tree/mdast-util-math) + — serialize math +* [`syntax-tree/mdast-util-mdx`](https://github.com/syntax-tree/mdast-util-mdx) + — serialize MDX or MDX.js +* [`syntax-tree/mdast-util-mdx-expression`](https://github.com/syntax-tree/mdast-util-mdx-expression) + — serialize MDX or MDX.js expressions +* [`syntax-tree/mdast-util-mdx-jsx`](https://github.com/syntax-tree/mdast-util-mdx-jsx) + — serialize MDX or MDX.js JSX +* [`syntax-tree/mdast-util-mdxjs-esm`](https://github.com/syntax-tree/mdast-util-mdxjs-esm) + — serialize MDX.js ESM + +## Security + +`mdast-util-to-markdown` will do its best to serialize markdown to match the +syntax tree, but there are several cases where that is impossible. +It’ll do its best, but complete roundtripping is impossible given that any value +could be injected into the tree. + +As Markdown is sometimes used for HTML, and improper use of HTML can open you up +to a [cross-site scripting (XSS)][xss] attack, use of `mdast-util-to-markdown` +and parsing it again later could potentially be unsafe. +When parsing markdown afterwards and then going to HTML, use something like +[`hast-util-sanitize`][sanitize] to make the tree safe. 
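As a worked example of the options documented in the API section above (illustrative tree and values, assuming the package is installed), the following combines a bullet marker, a list item indent, and a custom `join` function that keeps a list tight against a preceding heading:

```js
var toMarkdown = require('mdast-util-to-markdown')

var tree = {
  type: 'root',
  children: [
    {type: 'heading', depth: 1, children: [{type: 'text', value: 'Hi'}]},
    {
      type: 'list',
      ordered: false,
      children: [
        {
          type: 'listItem',
          children: [
            {type: 'paragraph', children: [{type: 'text', value: 'one'}]}
          ]
        }
      ]
    }
  ]
}

console.log(
  toMarkdown(tree, {
    bullet: '-', // `-` instead of the default `*`
    listItemIndent: 'one', // `- one` instead of `-   one`
    // No blank line between a heading and a list that directly follows it.
    join: [
      function (left, right) {
        if (left.type === 'heading' && right.type === 'list') return 0
      }
    ]
  })
)
// Prints roughly:
// # Hi
// - one
```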
+ +## Related + +* [`micromark/micromark`](https://github.com/micromark/micromark) + — the smallest commonmark-compliant markdown parser that exists +* [`remarkjs/remark`](https://github.com/remarkjs/remark) + — markdown processor powered by plugins +* [`syntax-tree/mdast-util-from-markdown`](https://github.com/syntax-tree/mdast-util-from-markdown) + — parse markdown to mdast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-to-markdown/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-to-markdown/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-to-markdown.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-to-markdown + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-to-markdown.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-to-markdown + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-to-markdown.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-to-markdown + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting + +[sanitize]: https://github.com/syntax-tree/hast-util-sanitize + +[handlers]: lib/handle + +[unsafe]: lib/unsafe.js diff --git a/node_modules/mdast-util-to-markdown/types/index.d.ts b/node_modules/mdast-util-to-markdown/types/index.d.ts new file mode 100644 index 00000000..47e40299 --- /dev/null +++ b/node_modules/mdast-util-to-markdown/types/index.d.ts @@ -0,0 +1,82 @@ +// Minimum TypeScript Version: 3.0 +import {Node, Parent} from 'unist' + +export = toMarkdown + +declare namespace toMarkdown { + interface SafeOptions { + before: string + after: string + } + + type Handle = ( + node: Node, + parent: Parent | null | undefined, + context: Context, + safeOptions: SafeOptions + ) => string + + interface Context { + stack: string[] + enter: (type: string) => () => void + options: Options + unsafe: Unsafe[] + join: Join[] + handle: Handle + } + + interface Handlers { + [key: string]: Handler + } + + interface Handler { + peek?: Handle + ( + node: Node, + parent: Parent | null | undefined, + context: Context, + safeOptions: SafeOptions + ): string + } + + interface Unsafe { + character: string + inConstruct?: string | string[] + notInConstruct?: string | string[] + after?: string + before?: string + atBreak?: boolean + } + + type Join = ( + left: Node, + right: Node, + parent: Parent, + context: Context + ) => boolean | null | void + + interface Options { + 
bullet?: '-' | '*' | '+' + closeAtx?: boolean + emphasis?: '_' | '*' + fence?: '~' | '`' + fences?: boolean + incrementListMarker?: boolean + listItemIndent?: 'tab' | 'one' | 'mixed' + quote?: '"' | "'" + resourceLink?: boolean + rule?: '-' | '_' | '*' + ruleRepetition?: number + ruleSpaces?: boolean + setext?: boolean + strong?: '_' | '*' + tightDefinitions?: boolean + + extensions?: Options[] + handlers?: Handlers + join?: Join[] + unsafe?: Unsafe[] + } +} + +declare function toMarkdown(node: Node, options?: toMarkdown.Options): string diff --git a/node_modules/mdast-util-to-string/index.js b/node_modules/mdast-util-to-string/index.js new file mode 100644 index 00000000..0ae5f1db --- /dev/null +++ b/node_modules/mdast-util-to-string/index.js @@ -0,0 +1,29 @@ +'use strict' + +module.exports = toString + +// Get the text content of a node. +// Prefer the node’s plain-text fields, otherwise serialize its children, +// and if the given value is an array, serialize the nodes in it. +function toString(node) { + return ( + (node && + (node.value || + node.alt || + node.title || + ('children' in node && all(node.children)) || + ('length' in node && all(node)))) || + '' + ) +} + +function all(values) { + var result = [] + var index = -1 + + while (++index < values.length) { + result[index] = toString(values[index]) + } + + return result.join('') +} diff --git a/node_modules/unist-util-visit/license b/node_modules/mdast-util-to-string/license similarity index 100% rename from node_modules/unist-util-visit/license rename to node_modules/mdast-util-to-string/license diff --git a/node_modules/mdast-util-to-string/package.json b/node_modules/mdast-util-to-string/package.json new file mode 100644 index 00000000..124287e2 --- /dev/null +++ b/node_modules/mdast-util-to-string/package.json @@ -0,0 +1,80 @@ +{ + "name": "mdast-util-to-string", + "version": "2.0.0", + "description": "mdast utility to get the plain text content of a node", + "license": "MIT", + "keywords": [ + "unist", + "mdast", + "mdast-util", + "util", + "utility", + "markdown", + "node", + "string", + "serialize" + ], + "repository": "syntax-tree/mdast-util-to-string", + "bugs": "https://github.com/syntax-tree/mdast-util-to-string/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "index.js", + "types/index.d.ts" + ], + "types": "types/index.d.ts", + "devDependencies": { + "browserify": "^17.0.0", + "dtslint": "^4.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "xo": "^0.34.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "build-bundle": "browserify . -s mdastUtilToString -o mdast-util-to-string.js", + "build-mangle": "browserify . 
-s mdastUtilToString -o mdast-util-to-string.min.js -p tinyify", + "build": "npm run build-bundle && npm run build-mangle", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "ignore": [ + "mdast-util-to-string.js", + "types/test.ts" + ] + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/mdast-util-to-string/readme.md b/node_modules/mdast-util-to-string/readme.md new file mode 100644 index 00000000..2b7f1a0e --- /dev/null +++ b/node_modules/mdast-util-to-string/readme.md @@ -0,0 +1,127 @@ +# mdast-util-to-string + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[mdast][]** utility to get the plain text content of a node. + +## Install + +[npm][]: + +```sh +npm install mdast-util-to-string +``` + +## Use + +```js +var unified = require('unified') +var parse = require('remark-parse') +var toString = require('mdast-util-to-string') + +var tree = unified() + .use(parse) + .parse('Some _emphasis_, **importance**, and `code`.') + +console.log(toString(tree)) // => 'Some emphasis, importance, and code.' +``` + +## API + +### `toString(node)` + +Get the text content of a [node][] or list of nodes. + +The algorithm checks `value` of `node`, then `alt`, and finally `title`. +If no value is found, the algorithm checks the children of `node` and joins them +(without spaces or newlines). + +> This is not a markdown to plain-text library. +> Use [`strip-markdown`][strip-markdown] for that. + +## Security + +Use of `mdast-util-to-string` does not involve **[hast][]**, user content, or +change the tree, so there are no openings for [cross-site scripting (XSS)][xss] +attacks. + +## Related + +* [`nlcst-to-string`](https://github.com/syntax-tree/nlcst-to-string) + — Get text content in nlcst +* [`hast-util-to-string`](https://github.com/wooorm/rehype-minify/tree/HEAD/packages/hast-util-to-string) + — Get text content in hast +* [`hast-util-to-text`](https://github.com/syntax-tree/hast-util-to-text) + — Get text content in hast according to the `innerText` algorithm +* [`hast-util-from-string`](https://github.com/wooorm/rehype-minify/tree/HEAD/packages/hast-util-from-string) + — Set text content in hast + +## Contribute + +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. 
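To make the lookup order described in the API section above concrete, a brief sketch with illustrative nodes (assuming the package is installed):

```js
var toString = require('mdast-util-to-string')

// `value` is preferred, then `alt`, then `title`, then the joined children.
console.log(toString({type: 'inlineCode', value: 'a'})) // => 'a'
console.log(toString({type: 'image', alt: 'b', title: 'c', url: 'd'})) // => 'b'
console.log(
  toString({
    type: 'emphasis',
    children: [{type: 'text', value: 'e'}, {type: 'text', value: 'f'}]
  })
) // => 'ef'
```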
+ +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/syntax-tree/mdast-util-to-string/workflows/main/badge.svg + +[build]: https://github.com/syntax-tree/mdast-util-to-string/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/mdast-util-to-string.svg + +[coverage]: https://codecov.io/github/syntax-tree/mdast-util-to-string + +[downloads-badge]: https://img.shields.io/npm/dm/mdast-util-to-string.svg + +[downloads]: https://www.npmjs.com/package/mdast-util-to-string + +[size-badge]: https://img.shields.io/bundlephobia/minzip/mdast-util-to-string.svg + +[size]: https://bundlephobia.com/result?p=mdast-util-to-string + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/syntax-tree/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md + +[mdast]: https://github.com/syntax-tree/mdast + +[node]: https://github.com/syntax-tree/mdast#nodes + +[strip-markdown]: https://github.com/remarkjs/strip-markdown + +[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting + +[hast]: https://github.com/syntax-tree/hast diff --git a/node_modules/mdast-util-to-string/types/index.d.ts b/node_modules/mdast-util-to-string/types/index.d.ts new file mode 100644 index 00000000..39b41dbb --- /dev/null +++ b/node_modules/mdast-util-to-string/types/index.d.ts @@ -0,0 +1,8 @@ +// Minimum TypeScript Version: 3.0 +import {Node} from 'unist' + +declare namespace mdastToString {} + +declare function mdastToString(node: Node | Node[]): string + +export = mdastToString diff --git a/node_modules/micromark-extension-footnote/html.js b/node_modules/micromark-extension-footnote/html.js new file mode 100644 index 00000000..988fe6cb --- /dev/null +++ b/node_modules/micromark-extension-footnote/html.js @@ -0,0 +1,168 @@ +var normalizeIdentifier = require('micromark/dist/util/normalize-identifier') + +exports.enter = { + footnoteDefinition: enterFootnoteDefinition, + footnoteDefinitionLabelString: buffer, + footnoteCallString: buffer, + inlineNoteText: enterNoteText +} +exports.exit = { + footnoteDefinition: exitFootnoteDefinition, + footnoteDefinitionLabelString: exitFootnoteDefinitionLabelString, + footnoteCallString: exitFootnoteCallString, + inlineNoteText: exitNoteText, + null: exitDocument +} + +var own = {}.hasOwnProperty + +function buffer() { + this.buffer() +} + +function exitFootnoteDefinitionLabelString(token) { + var stack = this.getData('footnoteDefinitionStack') + + if (!stack) this.setData('footnoteDefinitionStack', (stack = [])) + + stack.push(normalizeIdentifier(this.sliceSerialize(token))) + this.resume() // Drop the label. + this.buffer() // Get ready for a value. 
+} + +function enterFootnoteDefinition() { + this.getData('tightStack').push(false) +} + +function exitFootnoteDefinition() { + var definitions = this.getData('footnoteDefinitions') + var stack = this.getData('footnoteDefinitionStack') + var current = stack.pop() + var value = this.resume() + + if (!definitions) this.setData('footnoteDefinitions', (definitions = {})) + if (!own.call(definitions, current)) definitions[current] = value + + this.getData('tightStack').pop() + this.setData('slurpOneLineEnding', true) + // “Hack” to prevent a line ending from showing up if we’re in a definition in + // an empty list item. + this.setData('lastWasTag') +} + +function exitFootnoteCallString(token) { + var calls = this.getData('footnoteCallOrder') + var id = normalizeIdentifier(this.sliceSerialize(token)) + var index + var counter + + this.resume() + + if (!calls) this.setData('footnoteCallOrder', (calls = [])) + + index = calls.indexOf(id) + + if (index === -1) { + calls.push(id) + counter = calls.length + } else { + counter = index + 1 + } + + createCall.call(this, String(counter)) +} + +function exitDocument() { + var calls = this.getData('footnoteCallOrder') || [] + var definitions = this.getData('footnoteDefinitions') || {} + var notes = this.getData('inlineNotes') || {} + var index = -1 + var length = calls.length + var value + var id + var injected + var back + var counter + + if (length) { + this.lineEndingIfNeeded() + this.tag('
<div class="footnotes">') + this.lineEndingIfNeeded() + this.tag('<hr />') + this.lineEndingIfNeeded() + this.tag('<ol>') + } + + while (++index < length) { + // Called definitions are always defined. + id = calls[index] + counter = String(index + 1) + injected = false + back = '<a href="#fnref' + counter + '" class="footnote-back" role="doc-backlink">↩︎</a>' + value = (typeof id === 'number' ? notes : definitions)[id].replace( + /<\/p>(?:\r?\n|\r)?$/, + injectBack + ) + + this.lineEndingIfNeeded() + this.tag('<li id="fn' + counter + '" role="doc-endnote">') + this.lineEndingIfNeeded() + this.raw(value) + + if (!injected) { + this.lineEndingIfNeeded() + this.tag(back) + } + + this.lineEndingIfNeeded() + this.tag('</li>') + } + + if (length) { + this.lineEndingIfNeeded() + this.tag('</ol>') + this.lineEndingIfNeeded() + this.tag('</div>') + } + + function injectBack($0) { + injected = true + return back + $0 + } +} + +function enterNoteText() { + var counter = (this.getData('inlineNoteCounter') || 0) + 1 + var stack = this.getData('inlineNoteStack') + var calls = this.getData('footnoteCallOrder') + + if (!stack) this.setData('inlineNoteStack', (stack = [])) + if (!calls) this.setData('footnoteCallOrder', (calls = [])) + + stack.push(counter) + calls.push(counter) + this.setData('inlineNoteCounter', counter) + this.buffer() +} + +function exitNoteText() { + var counter = this.getData('inlineNoteStack').pop() + var notes = this.getData('inlineNotes') + + if (!notes) this.setData('inlineNotes', (notes = {})) + + notes[counter] = '<p>' + this.resume() + '</p>
' + createCall.call(this, String(counter)) +} + +function createCall(counter) { + this.tag( + '' + ) + this.raw(counter) + this.tag('') +} diff --git a/node_modules/micromark-extension-footnote/index.js b/node_modules/micromark-extension-footnote/index.js new file mode 100644 index 00000000..4cb1be0c --- /dev/null +++ b/node_modules/micromark-extension-footnote/index.js @@ -0,0 +1,442 @@ +module.exports = footnote + +var normalizeIdentifier = require('micromark/dist/util/normalize-identifier') +var blank = require('micromark/dist/tokenize/partial-blank-line') +var createSpace = require('micromark/dist/tokenize/factory-space') +var chunkedSplice = require('micromark/dist/util/chunked-splice') +var prefixSize = require('micromark/dist/util/prefix-size') +var shallow = require('micromark/dist/util/shallow') +var resolveAll = require('micromark/dist/util/resolve-all') + +var indent = {tokenize: tokenizeIndent, partial: true} + +function footnote(options) { + var settings = options || {} + var call = {tokenize: tokenizeFootnoteCall} + var noteStart = {tokenize: tokenizeNoteStart, resolveAll: resolveAllNote} + var noteEnd = { + add: 'after', + tokenize: tokenizeNoteEnd, + resolveAll: resolveAllNote, + resolveTo: resolveToNoteEnd + } + var definition = { + tokenize: tokenizeDefinitionStart, + continuation: {tokenize: tokenizeDefinitionContinuation}, + exit: footnoteDefinitionEnd + } + var text = {91: call} + + if (settings.inlineNotes) { + text[93] = noteEnd + text[94] = noteStart + } + + return { + _hiddenFootnoteSupport: {}, + document: {91: definition}, + text: text + } +} + +// Remove remaining note starts. +function resolveAllNote(events) { + var length = events.length + var index = -1 + var token + + while (++index < length) { + token = events[index][1] + + if (events[index][0] === 'enter' && token.type === 'inlineNoteStart') { + token.type = 'data' + // Remove the two marker (`^[`). + events.splice(index + 1, 4) + length -= 4 + } + } + + return events +} + +function resolveToNoteEnd(events, context) { + var index = events.length - 4 + var group + var text + var token + var type + var openIndex + + // Find an opening. + while (index--) { + token = events[index][1] + + // Find where the note starts. + if (events[index][0] === 'enter' && token.type === 'inlineNoteStart') { + openIndex = index + type = 'inlineNote' + break + } + } + + group = { + type: type, + start: shallow(events[openIndex][1].start), + end: shallow(events[events.length - 1][1].end) + } + + text = { + type: 'inlineNoteText', + start: shallow(events[openIndex + 4][1].end), + end: shallow(events[events.length - 3][1].start) + } + + var note = [ + ['enter', group, context], + events[openIndex + 1], + events[openIndex + 2], + events[openIndex + 3], + events[openIndex + 4], + ['enter', text, context] + ] + + chunkedSplice( + note, + note.length, + 0, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(openIndex + 6, -4), + context + ) + ) + + note.push( + ['exit', text, context], + events[events.length - 2], + events[events.length - 3], + ['exit', group, context] + ) + + chunkedSplice(events, index, events.length - index, note) + + return events +} + +function tokenizeFootnoteCall(effects, ok, nok) { + var self = this + var defined = self.parser.footnotes || (self.parser.footnotes = []) + var size = 0 + var data + + return start + + function start(code) { + // istanbul ignore next - Hooks. 
+ if (code !== 91) return nok(code) + + effects.enter('footnoteCall') + effects.enter('footnoteCallLabelMarker') + effects.consume(code) + effects.exit('footnoteCallLabelMarker') + return callStart + } + + function callStart(code) { + if (code !== 94) return nok(code) + + effects.enter('footnoteCallMarker') + effects.consume(code) + effects.exit('footnoteCallMarker') + effects.enter('footnoteCallString') + effects.enter('chunkString').contentType = 'string' + return callData + } + + function callData(code) { + var token + + if (code === null || code === 91 || size++ > 999) { + return nok(code) + } + + if (code === 93) { + if (!data) { + return nok(code) + } + + effects.exit('chunkString') + token = effects.exit('footnoteCallString') + return defined.indexOf(normalizeIdentifier(self.sliceSerialize(token))) < + 0 + ? nok(code) + : end(code) + } + + effects.consume(code) + + if (!(code < 0 || code === 32)) { + data = true + } + + return code === 92 ? callEscape : callData + } + + function callEscape(code) { + if (code === 91 || code === 92 || code === 93) { + effects.consume(code) + size++ + return callData + } + + return callData(code) + } + + function end(code) { + // Always a `]`. + effects.enter('footnoteCallLabelMarker') + effects.consume(code) + effects.exit('footnoteCallLabelMarker') + effects.exit('footnoteCall') + return ok + } +} + +function tokenizeNoteStart(effects, ok, nok) { + return start + + function start(code) { + // istanbul ignore next - Hooks. + if (code !== 94) return nok(code) + + effects.enter('inlineNoteStart') + effects.enter('inlineNoteMarker') + effects.consume(code) + effects.exit('inlineNoteMarker') + return noteStart + } + + function noteStart(code) { + if (code !== 91) return nok(code) + + effects.enter('inlineNoteStartMarker') + effects.consume(code) + effects.exit('inlineNoteStartMarker') + effects.exit('inlineNoteStart') + return ok + } +} + +function tokenizeNoteEnd(effects, ok, nok) { + var self = this + + return start + + function start(code) { + var index = self.events.length + var hasStart + + // Find an opening. + while (index--) { + if (self.events[index][1].type === 'inlineNoteStart') { + hasStart = true + break + } + } + + // istanbul ignore next - Hooks. + if (code !== 93 || !hasStart) { + return nok(code) + } + + effects.enter('inlineNoteEnd') + effects.enter('inlineNoteEndMarker') + effects.consume(code) + effects.exit('inlineNoteEndMarker') + effects.exit('inlineNoteEnd') + return ok + } +} + +function tokenizeDefinitionStart(effects, ok, nok) { + var self = this + var defined = self.parser.footnotes || (self.parser.footnotes = []) + var identifier + var size = 0 + var data + + return start + + function start(code) { + /* istanbul ignore if - hooks. 
*/ + if (code !== 91) { + return nok(code) + } + + effects.enter('footnoteDefinition')._container = true + effects.enter('footnoteDefinitionLabel') + effects.enter('footnoteDefinitionLabelMarker') + effects.consume(code) + effects.exit('footnoteDefinitionLabelMarker') + return labelStart + } + + function labelStart(code) { + // `^` + if (code !== 94) return nok(code) + + effects.enter('footnoteDefinitionMarker') + effects.consume(code) + effects.exit('footnoteDefinitionMarker') + effects.enter('footnoteDefinitionLabelString') + return atBreak + } + + function atBreak(code) { + var token + + if (code === null || code === 91 || size > 999) { + return nok(code) + } + + if (code === 93) { + if (!data) { + return nok(code) + } + + token = effects.exit('footnoteDefinitionLabelString') + identifier = normalizeIdentifier(self.sliceSerialize(token)) + effects.enter('footnoteDefinitionLabelMarker') + effects.consume(code) + effects.exit('footnoteDefinitionLabelMarker') + effects.exit('footnoteDefinitionLabel') + return labelAfter + } + + if (code === -5 || code === -4 || code === -3) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + size++ + return atBreak + } + + effects.enter('chunkString').contentType = 'string' + return label(code) + } + + function label(code) { + if ( + code === null || + code === -5 || + code === -4 || + code === -3 || + code === 91 || + code === 93 || + size > 999 + ) { + effects.exit('chunkString') + return atBreak(code) + } + + if (!(code < 0 || code === 32)) { + data = true + } + + size++ + effects.consume(code) + return code === 92 ? labelEscape : label + } + + function labelEscape(code) { + if (code === 91 || code === 92 || code === 93) { + effects.consume(code) + size++ + return label + } + + return label(code) + } + + function labelAfter(code) { + if (code !== 58) { + return nok(code) + } + + effects.enter('definitionMarker') + effects.consume(code) + effects.exit('definitionMarker') + return effects.check(blank, onBlank, nonBlank) + } + + function onBlank(code) { + self.containerState.initialBlankLine = true + return done(code) + } + + function nonBlank(code) { + // A space or tab. + if (code === -2 || code === -1 || code === 32) { + effects.enter('footnoteDefinitionWhitespace') + effects.consume(code) + effects.exit('footnoteDefinitionWhitespace') + return done(code) + } + + // No space is also fine, just like a block quote marker. + return done(code) + } + + function done(code) { + if (defined.indexOf(identifier) < 0) { + defined.push(identifier) + } + + return ok(code) + } +} + +function tokenizeDefinitionContinuation(effects, ok, nok) { + var self = this + + return effects.check(blank, onBlank, notBlank) + + // Continued blank lines are fine. + function onBlank(code) { + if (self.containerState.initialBlankLine) { + self.containerState.furtherBlankLines = true + } + + return ok(code) + } + + // If there were continued blank lines, or this isn’t indented at all. 
+ function notBlank(code) { + if ( + self.containerState.furtherBlankLines || + !(code === -2 || code === -1 || code === 32) + ) { + return nok(code) + } + + self.containerState.initialBlankLine = undefined + self.containerState.furtherBlankLines = undefined + return effects.attempt(indent, ok, nok)(code) + } +} + +function footnoteDefinitionEnd(effects) { + effects.exit('footnoteDefinition') +} + +function tokenizeIndent(effects, ok, nok) { + var self = this + + return createSpace(effects, afterPrefix, 'footnoteDefinitionIndent', 5) + + function afterPrefix(code) { + return prefixSize(self.events, 'footnoteDefinitionIndent') === 4 + ? ok(code) + : nok(code) + } +} diff --git a/node_modules/micromark-extension-footnote/license b/node_modules/micromark-extension-footnote/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/micromark-extension-footnote/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/micromark-extension-footnote/package.json b/node_modules/micromark-extension-footnote/package.json new file mode 100644 index 00000000..a5016735 --- /dev/null +++ b/node_modules/micromark-extension-footnote/package.json @@ -0,0 +1,73 @@ +{ + "name": "micromark-extension-footnote", + "version": "0.3.2", + "description": "micromark extension to support footnotes", + "license": "MIT", + "keywords": [ + "micromark", + "micromark-extension", + "footnote", + "note", + "definition", + "markdown", + "unified" + ], + "repository": "micromark/micromark-extension-footnote", + "bugs": "https://github.com/micromark/micromark-extension-footnote/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "index.js", + "html.js" + ], + "dependencies": { + "micromark": "~2.11.0" + }, + "devDependencies": { + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.36.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "unicorn/prefer-includes": "off", + "unicorn/prefer-optional-catch-binding": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-footnote/readme.md b/node_modules/micromark-extension-footnote/readme.md new file mode 100644 index 00000000..53429edb --- /dev/null +++ b/node_modules/micromark-extension-footnote/readme.md @@ -0,0 +1,211 @@ +# micromark-extension-footnote + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support footnotes. + +As there is no spec for footnotes in markdown, this extension stays as close to +references and list items in CommonMark, while being inspired by the HTML output +of Pandoc notes. + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +You probably shouldn’t use this package directly, but instead use +[`mdast-util-footnote`][mdast-util-footnote] with **[mdast][]** or +[`remark-footnotes`][remark-footnotes] with **[remark][]**. + +## Install + +[npm][]: + +```sh +npm install micromark-extension-footnote +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown +Here is a footnote call,[^1] and another.[^longnote] + +[^1]: Here is the footnote. + +[^longnote]: Here’s one with multiple blocks. + + Subsequent paragraphs are indented to show that they +belong to the previous footnote. + + { some.code } + + The whole paragraph can be indented, or just the first + line. In this way, multi-paragraph footnotes work like + multi-paragraph list items. + +This paragraph won’t be part of the note, because it +isn’t indented. + +Here is an inline note.^[Inlines notes are easier to write, since +you don’t have to pick an identifier and move down to type the +note.] +``` + +And our script, `example.js`, looks as follows: + +```js +var fs = require('fs') +var micromark = require('micromark') +var footnote = require('micromark-extension-footnote') +var footnoteHtml = require('micromark-extension-footnote/html') + +var doc = fs.readFileSync('example.md') + +var result = micromark(doc, { + extensions: [footnote({inlineNotes: true})], + htmlExtensions: [footnoteHtml] +}) + +console.log(result) +``` + +Now, running `node example` yields: + +```html +

+<!--
+  Output summary: the two footnote calls and the inline note render as
+  superscript links. The paragraph "This paragraph won’t be part of the note,
+  because it isn’t indented." stays outside the notes. The note contents
+  ("Here is the footnote.", the multi-block note with its indented
+  "{ some.code }" block and follow-up paragraphs, and the inline note text)
+  are listed at the end, each followed by a back-reference (↩︎) link.
+-->
+``` + +## API + +### `html` + +### `syntax(options?)` + +> Note: `syntax` is the default export of this module, `html` is available at +> `micromark-extension-footnote/html`. + +Support footnotes. +The export of `syntax` is a function that can be called with options and returns +an extension for the micromark parser (to tokenize footnotes; can be passed in +`extensions`). +The export of `html` is an extension for the default HTML compiler (to compile +as HTML; can be passed in `htmlExtensions`). + +###### `options.inlineNotes` + +Whether to support `^[inline notes]` (`boolean`, default: `false`). + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`remarkjs/remark-footnotes`][remark-footnotes] + — remark plugin to support footnotes +* [`syntax-tree/mdast-util-footnote`][mdast-util-footnote] + — mdast utility to support footnotes +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/micromark/micromark-extension-footnote/workflows/main/badge.svg + +[build]: https://github.com/micromark/micromark-extension-footnote/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-footnote.svg + +[coverage]: https://codecov.io/github/micromark/micromark-extension-footnote + +[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-footnote.svg + +[downloads]: https://www.npmjs.com/package/micromark-extension-footnote + +[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-footnote.svg + +[size]: https://bundlephobia.com/result?p=micromark-extension-footnote + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/micromark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[micromark]: https://github.com/micromark/micromark + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[remark]: https://github.com/remarkjs/remark + +[mdast]: https://github.com/syntax-tree/mdast + +[mdast-util-footnote]: https://github.com/syntax-tree/mdast-util-footnote + +[remark-footnotes]: https://github.com/remarkjs/remark-footnotes diff --git a/node_modules/micromark-extension-frontmatter/html.js b/node_modules/micromark-extension-frontmatter/html.js new file mode 100644 index 00000000..227dd9ac --- 
/dev/null +++ b/node_modules/micromark-extension-frontmatter/html.js @@ -0,0 +1 @@ +module.exports = require('./lib/html') diff --git a/node_modules/micromark-extension-frontmatter/index.js b/node_modules/micromark-extension-frontmatter/index.js new file mode 100644 index 00000000..624255df --- /dev/null +++ b/node_modules/micromark-extension-frontmatter/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/syntax') diff --git a/node_modules/micromark-extension-frontmatter/lib/html.js b/node_modules/micromark-extension-frontmatter/lib/html.js new file mode 100644 index 00000000..21ed8ddb --- /dev/null +++ b/node_modules/micromark-extension-frontmatter/lib/html.js @@ -0,0 +1,29 @@ +module.exports = create + +var matters = require('./matters') + +function create(options) { + var settings = matters(options) + var length = settings.length + var index = -1 + var matter + var enter = {} + var exit = {} + + while (++index < length) { + matter = settings[index] + enter[matter.type] = start + exit[matter.type] = end + } + + return {enter: enter, exit: exit} + + function start() { + this.buffer() + } + + function end() { + this.resume() + this.setData('slurpOneLineEnding', true) + } +} diff --git a/node_modules/remark-frontmatter/lib/matters.js b/node_modules/micromark-extension-frontmatter/lib/matters.js similarity index 78% rename from node_modules/remark-frontmatter/lib/matters.js rename to node_modules/micromark-extension-frontmatter/lib/matters.js index 543998d6..a2760301 100644 --- a/node_modules/remark-frontmatter/lib/matters.js +++ b/node_modules/micromark-extension-frontmatter/lib/matters.js @@ -1,30 +1,26 @@ -'use strict' +module.exports = matters var fault = require('fault') -module.exports = matters - var own = {}.hasOwnProperty -var markers = { - yaml: '-', - toml: '+' -} +var markers = {yaml: '-', toml: '+'} function matters(options) { + var settings = options || 'yaml' var results = [] var index = -1 var length // One preset or matter. 
- if (typeof options === 'string' || !('length' in options)) { - options = [options] + if (typeof settings === 'string' || !('length' in settings)) { + settings = [settings] } - length = options.length + length = settings.length while (++index < length) { - results[index] = matter(options[index]) + results[index] = matter(settings[index]) } return results diff --git a/node_modules/micromark-extension-frontmatter/lib/syntax.js b/node_modules/micromark-extension-frontmatter/lib/syntax.js new file mode 100644 index 00000000..3be31047 --- /dev/null +++ b/node_modules/micromark-extension-frontmatter/lib/syntax.js @@ -0,0 +1,167 @@ +module.exports = create + +var matters = require('./matters') + +function create(options) { + var settings = matters(options) + var length = settings.length + var index = -1 + var flow = {} + var matter + var code + + while (++index < length) { + matter = settings[index] + code = fence(matter, 'open').charCodeAt(0) + if (code in flow) { + flow[code].push(parse(matter)) + } else { + flow[code] = [parse(matter)] + } + } + + return {flow: flow} +} + +function parse(matter) { + var name = matter.type + var anywhere = matter.anywhere + var valueType = name + 'Value' + var fenceType = name + 'Fence' + var sequenceType = fenceType + 'Sequence' + var fenceConstruct = {tokenize: tokenizeFence, partial: true} + var buffer + + return {tokenize: tokenizeFrontmatter, concrete: true} + + function tokenizeFrontmatter(effects, ok, nok) { + var self = this + + return start + + function start(code) { + var position = self.now() + + if (position.column !== 1 || (!anywhere && position.line !== 1)) { + return nok(code) + } + + effects.enter(name) + buffer = fence(matter, 'open') + return effects.attempt(fenceConstruct, afterOpeningFence, nok)(code) + } + + function afterOpeningFence(code) { + buffer = fence(matter, 'close') + return lineEnd(code) + } + + function lineStart(code) { + if (code === -5 || code === -4 || code === -3 || code === null) { + return lineEnd(code) + } + + effects.enter(valueType) + return lineData(code) + } + + function lineData(code) { + if (code === -5 || code === -4 || code === -3 || code === null) { + effects.exit(valueType) + return lineEnd(code) + } + + effects.consume(code) + return lineData + } + + function lineEnd(code) { + // Require a closing fence. + if (code === null) { + return nok(code) + } + + // Can only be an eol. 
+ effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return effects.attempt(fenceConstruct, after, lineStart) + } + + function after(code) { + effects.exit(name) + return ok(code) + } + } + + function tokenizeFence(effects, ok, nok) { + var bufferIndex = 0 + + return start + + function start(code) { + if (code === buffer.charCodeAt(bufferIndex)) { + effects.enter(fenceType) + effects.enter(sequenceType) + return insideSequence(code) + } + + return nok(code) + } + + function insideSequence(code) { + if (bufferIndex === buffer.length) { + effects.exit(sequenceType) + + if (code === -2 || code === -1 || code === 32) { + effects.enter('whitespace') + return insideWhitespace(code) + } + + return fenceEnd(code) + } + + if (code === buffer.charCodeAt(bufferIndex)) { + effects.consume(code) + bufferIndex++ + return insideSequence + } + + return nok(code) + } + + function insideWhitespace(code) { + if (code === -2 || code === -1 || code === 32) { + effects.consume(code) + return insideWhitespace + } + + effects.exit('whitespace') + return fenceEnd(code) + } + + function fenceEnd(code) { + if (code === -5 || code === -4 || code === -3 || code === null) { + effects.exit(fenceType) + return ok(code) + } + + return nok(code) + } + } +} + +function fence(matter, prop) { + var marker + + if (matter.marker) { + marker = pick(matter.marker, prop) + return marker + marker + marker + } + + return pick(matter.fence, prop) +} + +function pick(schema, prop) { + return typeof schema === 'string' ? schema : schema[prop] +} diff --git a/node_modules/micromark-extension-frontmatter/license b/node_modules/micromark-extension-frontmatter/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/micromark-extension-frontmatter/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
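As a rough orientation, here is a minimal sketch of wiring the frontmatter syntax and html factories above into micromark ~2.x; the `doc` string and the `['yaml']` argument are illustrative, not taken from the package:

```js
// Sketch only, assuming micromark ~2.x and the extension code above.
// Both exports are factories that accept a preset ('yaml', 'toml') or a
// Matter object, normalized by lib/matters.js.
var micromark = require('micromark')
var frontmatter = require('micromark-extension-frontmatter')
var frontmatterHtml = require('micromark-extension-frontmatter/html')

var doc = '---\ntitle: Example\n---\n\n# Heading'

console.log(
  micromark(doc, {
    extensions: [frontmatter(['yaml'])],
    htmlExtensions: [frontmatterHtml(['yaml'])]
  })
)
// The YAML block is tokenized but dropped by the HTML handlers above (they
// buffer and then discard it), so only the heading is compiled to HTML.
```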
diff --git a/node_modules/micromark-extension-frontmatter/package.json b/node_modules/micromark-extension-frontmatter/package.json new file mode 100644 index 00000000..e4198b6f --- /dev/null +++ b/node_modules/micromark-extension-frontmatter/package.json @@ -0,0 +1,72 @@ +{ + "name": "micromark-extension-frontmatter", + "version": "0.2.2", + "description": "micromark extension to support frontmatter (YAML, TOML, etc)", + "license": "MIT", + "keywords": [ + "micromark", + "micromark-extension", + "frontmatter", + "yaml", + "toml", + "gfm", + "markdown", + "unified" + ], + "repository": "micromark/micromark-extension-frontmatter", + "bugs": "https://github.com/micromark/micromark-extension-frontmatter/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "lib/", + "index.js", + "html.js" + ], + "dependencies": { + "fault": "^1.0.0" + }, + "devDependencies": { + "micromark": "~2.9.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^8.0.0", + "remark-preset-wooorm": "^7.0.0", + "tape": "^5.0.0", + "xo": "^0.33.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-frontmatter/readme.md b/node_modules/micromark-extension-frontmatter/readme.md new file mode 100644 index 00000000..c15c07ff --- /dev/null +++ b/node_modules/micromark-extension-frontmatter/readme.md @@ -0,0 +1,203 @@ +# micromark-extension-frontmatter + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support frontmatter (YAML, TOML, etc). + +As there is no spec for frontmatter in markdown, this extension follows how YAML +frontmatter works on github.com. +For the HTML part, instead of rendering YAML, it is ignored. +Other types of frontmatter can be parsed, which will by default also work the +same as on github.com. + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +You probably shouldn’t use this package directly, but instead use +[`mdast-util-frontmatter`][mdast-util-frontmatter] with **[mdast][]** or +[`remark-frontmatter`][remark-frontmatter] with **[remark][]**. + +## Install + +[npm][]: + +```sh +npm install micromark-extension-frontmatter +``` + +## API + +### `html(options)` + +### `syntax(options)` + +> Note: `syntax` is the default export of this module, `html` is available at +> `micromark-extension-frontmatter/html`. + +Support frontmatter (YAML, TOML, and more). 
+ +The exports are functions that can be called with options and return extensions +for the micromark parser (to tokenize frontmatter; can be passed in +`extensions`) and the default HTML compiler (to ignore frontmatter; can be +passed in `htmlExtensions`). + +##### `options` + +One [`preset`][preset] or [`Matter`][matter], or an array of them, defining all +the supported frontmatters (default: `'yaml'`). + +##### `preset` + +Either `'yaml'` or `'toml'`: + +* `'yaml'` — [`matter`][matter] defined as `{type: 'yaml', marker: '-'}` +* `'toml'` — [`matter`][matter] defined as `{type: 'toml', marker: '+'}` + +##### `Matter` + +An object with a `type` and either a `marker` or a `fence`: + +* `type` (`string`) + — Type to tokenize as +* `marker` (`string` or `{open: string, close: string}`) + — Character used to construct fences. + By providing an object with `open` and `close`. + different characters can be used for opening and closing fences. + For example the character `'-'` will result in `'---'` being used as the + fence +* `fence` (`string` or `{open: string, close: string}`) + — String used as the complete fence. + By providing an object with `open` and `close` different values can be used + for opening and closing fences. + This can be used too if fences contain different characters or lengths other + than 3 +* `anywhere` (`boolean`, default: `false`) + – if `true`, matter can be found anywhere in the document. + If `false` (default), only matter at the start of the document is recognized + +###### Example + +For `{type: 'yaml', marker: '-'}`: + +```yaml +--- +key: value +--- +``` + +For `{type: 'custom', marker: {open: '<', close: '>'}}`: + +```text +<<< +data +>>> +``` + +For `{type: 'custom', fence: '+=+=+=+'}`: + +```text ++=+=+=+ +data ++=+=+=+ +``` + +For `{type: 'json', fence: {open: '{', close: '}'}}`: + +```json +{ + "key": "value" +} +``` + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`remarkjs/remark-frontmatter`][remark-frontmatter] + — remark plugin to support frontmatter +* [`syntax-tree/mdast-util-frontmatter`][mdast-util-frontmatter] + — mdast utility to support frontmatter +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. 
+ +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://img.shields.io/travis/micromark/micromark-extension-frontmatter.svg + +[build]: https://travis-ci.org/micromark/micromark-extension-frontmatter + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-frontmatter.svg + +[coverage]: https://codecov.io/github/micromark/micromark-extension-frontmatter + +[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-frontmatter.svg + +[downloads]: https://www.npmjs.com/package/micromark-extension-frontmatter + +[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-frontmatter.svg + +[size]: https://bundlephobia.com/result?p=micromark-extension-frontmatter + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/unist/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[micromark]: https://github.com/micromark/micromark + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[remark]: https://github.com/remarkjs/remark + +[mdast]: https://github.com/syntax-tree/mdast + +[mdast-util-frontmatter]: https://github.com/syntax-tree/mdast-util-frontmatter + +[remark-frontmatter]: https://github.com/remarkjs/remark-frontmatter + +[preset]: #preset + +[matter]: #matter diff --git a/node_modules/micromark-extension-gfm-autolink-literal/html.js b/node_modules/micromark-extension-gfm-autolink-literal/html.js new file mode 100644 index 00000000..099c4078 --- /dev/null +++ b/node_modules/micromark-extension-gfm-autolink-literal/html.js @@ -0,0 +1,28 @@ +var normalizeUri = require('micromark/dist/util/normalize-uri') + +exports.exit = { + literalAutolinkEmail: literalAutolinkEmail, + literalAutolinkHttp: literalAutolinkHttp, + literalAutolinkWww: literalAutolinkWww +} + +function literalAutolinkWww(token) { + return anchorFromToken.call(this, token, 'http://') +} + +function literalAutolinkEmail(token) { + return anchorFromToken.call(this, token, 'mailto:') +} + +function literalAutolinkHttp(token) { + return anchorFromToken.call(this, token) +} + +function anchorFromToken(token, protocol) { + var url = this.sliceSerialize(token) + this.tag( + '' + ) + this.raw(this.encode(url)) + this.tag('') +} diff --git a/node_modules/micromark-extension-gfm-autolink-literal/index.js b/node_modules/micromark-extension-gfm-autolink-literal/index.js new file mode 100644 index 00000000..b64479d1 --- /dev/null +++ b/node_modules/micromark-extension-gfm-autolink-literal/index.js @@ -0,0 +1 @@ +module.exports = require('./syntax') diff --git a/node_modules/micromark-extension-gfm-autolink-literal/license b/node_modules/micromark-extension-gfm-autolink-literal/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/micromark-extension-gfm-autolink-literal/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, 
free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/micromark-extension-gfm-autolink-literal/package.json b/node_modules/micromark-extension-gfm-autolink-literal/package.json new file mode 100644 index 00000000..61ad03fe --- /dev/null +++ b/node_modules/micromark-extension-gfm-autolink-literal/package.json @@ -0,0 +1,76 @@ +{ + "name": "micromark-extension-gfm-autolink-literal", + "version": "0.5.7", + "description": "micromark extension to support GFM autolink literals", + "license": "MIT", + "keywords": [ + "micromark", + "micromark-extension", + "literal", + "url", + "autolink", + "auto", + "link", + "gfm", + "markdown", + "unified" + ], + "repository": "micromark/micromark-extension-gfm-autolink-literal", + "bugs": "https://github.com/micromark/micromark-extension-gfm-autolink-literal/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "index.js", + "html.js", + "syntax.js" + ], + "dependencies": { + "micromark": "~2.11.3" + }, + "devDependencies": { + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.38.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "unicorn/no-this-assignment": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-gfm-autolink-literal/readme.md b/node_modules/micromark-extension-gfm-autolink-literal/readme.md new file mode 100644 index 00000000..ff0e54a1 --- /dev/null +++ b/node_modules/micromark-extension-gfm-autolink-literal/readme.md @@ -0,0 +1,133 @@ +# micromark-extension-gfm-autolink-literal + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support GitHub flavored markdown [literal +autolinks][]. + +This syntax extension matches the GFM spec and how literal autolinks work +in several places on github.com. +Do note that GH employs two algorithms to autolink: one at parse time, +one at compile time (similar to how @mentions are done at compile time). +This difference can be observed because character references and escapes +are handled differently. +But also because issues/PRs/comments omit (perhaps by accident?) the second +algorithm for `www.`, `http://`, and `https://` links (but not for email links). + +As this is a syntax extension, it focuses on the first algorithm. +The `html` part of this extension does not operate on an AST and hence can’t +perform the second algorithm. +`mdast-util-gfm-autolink-literal` adds support for the second. + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +You probably should use this package with +[`mdast-util-gfm-autolink-literal`][mdast-util-gfm-autolink-literal]. + +## Install + +[npm][]: + +```sh +npm install micromark-extension-gfm-autolink-literal +``` + +## API + +### `html` + +### `syntax` + +> Note: `syntax` is the default export of this module, `html` is available at +> `micromark-extension-gfm-autolink-literal/html`. + +Support [literal autolinks][]. +The exports are extensions for the micromark parser (to tokenize; can be passed +in `extensions`) and the default HTML compiler (to compile as `` elements; +can be passed in `htmlExtensions`). + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`syntax-tree/mdast-util-gfm-autolink-literal`](https://github.com/syntax-tree/mdast-util-gfm-autolink-literal) + — mdast utility to support autolink literals +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. 
+ +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/micromark/micromark-extension-gfm-autolink-literal/workflows/main/badge.svg + +[build]: https://github.com/micromark/micromark-extension-gfm-autolink-literal/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-gfm-autolink-literal.svg + +[coverage]: https://codecov.io/github/micromark/micromark-extension-gfm-autolink-literal + +[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-gfm-autolink-literal.svg + +[downloads]: https://www.npmjs.com/package/micromark-extension-gfm-autolink-literal + +[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-gfm-autolink-literal.svg + +[size]: https://bundlephobia.com/result?p=micromark-extension-gfm-autolink-literal + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/micromark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[micromark]: https://github.com/micromark/micromark + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[remark]: https://github.com/remarkjs/remark + +[mdast-util-gfm-autolink-literal]: https://github.com/syntax-tree/mdast-util-gfm-autolink-literal + +[literal autolinks]: https://github.github.com/gfm/#autolinks-extension- diff --git a/node_modules/micromark-extension-gfm-autolink-literal/syntax.js b/node_modules/micromark-extension-gfm-autolink-literal/syntax.js new file mode 100644 index 00000000..863ed6c7 --- /dev/null +++ b/node_modules/micromark-extension-gfm-autolink-literal/syntax.js @@ -0,0 +1,581 @@ +var asciiAlpha = require('micromark/dist/character/ascii-alpha') +var asciiAlphanumeric = require('micromark/dist/character/ascii-alphanumeric') +var asciiControl = require('micromark/dist/character/ascii-control') +var markdownLineEnding = require('micromark/dist/character/markdown-line-ending') +var unicodePunctuation = require('micromark/dist/character/unicode-punctuation') +var unicodeWhitespace = require('micromark/dist/character/unicode-whitespace') + +var www = {tokenize: tokenizeWww, partial: true} +var domain = {tokenize: tokenizeDomain, partial: true} +var path = {tokenize: tokenizePath, partial: true} +var punctuation = {tokenize: tokenizePunctuation, partial: true} +var namedCharacterReference = { + tokenize: tokenizeNamedCharacterReference, + partial: true +} + +var wwwAutolink = {tokenize: tokenizeWwwAutolink, previous: previousWww} +var httpAutolink = {tokenize: tokenizeHttpAutolink, previous: previousHttp} +var emailAutolink = {tokenize: tokenizeEmailAutolink, previous: previousEmail} + +var text = {} + +// Export hooked constructs. +exports.text = text + +// `0` +var code = 48 + +// While the code is smaller than `{`. 
+while (code < 123) { + text[code] = emailAutolink + code++ + // Jump from `:` -> `A` + if (code === 58) code = 65 + // Jump from `[` -> `a` + else if (code === 91) code = 97 +} + +// `+` +text[43] = emailAutolink +// `-` +text[45] = emailAutolink +// `.` +text[46] = emailAutolink +// `_` +text[95] = emailAutolink +// `h`. +text[72] = [emailAutolink, httpAutolink] +text[104] = [emailAutolink, httpAutolink] +// `w`. +text[87] = [emailAutolink, wwwAutolink] +text[119] = [emailAutolink, wwwAutolink] + +function tokenizeEmailAutolink(effects, ok, nok) { + var self = this + var hasDot + + return start + + function start(code) { + /* istanbul ignore next - hooks. */ + if ( + !gfmAtext(code) || + !previousEmail(self.previous) || + previous(self.events) + ) { + return nok(code) + } + + effects.enter('literalAutolink') + effects.enter('literalAutolinkEmail') + return atext(code) + } + + function atext(code) { + if (gfmAtext(code)) { + effects.consume(code) + return atext + } + + // `@` + if (code === 64) { + effects.consume(code) + return label + } + + return nok(code) + } + + function label(code) { + // `.` + if (code === 46) { + return effects.check(punctuation, done, dotContinuation)(code) + } + + if ( + // `-` + code === 45 || + // `_` + code === 95 + ) { + return effects.check(punctuation, nok, dashOrUnderscoreContinuation)(code) + } + + if (asciiAlphanumeric(code)) { + effects.consume(code) + return label + } + + return done(code) + } + + function dotContinuation(code) { + effects.consume(code) + hasDot = true + return label + } + + function dashOrUnderscoreContinuation(code) { + effects.consume(code) + return afterDashOrUnderscore + } + + function afterDashOrUnderscore(code) { + // `.` + if (code === 46) { + return effects.check(punctuation, nok, dotContinuation)(code) + } + + return label(code) + } + + function done(code) { + if (hasDot) { + effects.exit('literalAutolinkEmail') + effects.exit('literalAutolink') + return ok(code) + } + + return nok(code) + } +} + +function tokenizeWwwAutolink(effects, ok, nok) { + var self = this + + return start + + function start(code) { + /* istanbul ignore next - hooks. */ + if ( + (code !== 87 && code - 32 !== 87) || + !previousWww(self.previous) || + previous(self.events) + ) { + return nok(code) + } + + effects.enter('literalAutolink') + effects.enter('literalAutolinkWww') + // For `www.` we check instead of attempt, because when it matches, GH + // treats it as part of a domain (yes, it says a valid domain must come + // after `www.`, but that’s not how it’s implemented by them). + return effects.check( + www, + effects.attempt(domain, effects.attempt(path, done), nok), + nok + )(code) + } + + function done(code) { + effects.exit('literalAutolinkWww') + effects.exit('literalAutolink') + return ok(code) + } +} + +function tokenizeHttpAutolink(effects, ok, nok) { + var self = this + + return start + + function start(code) { + /* istanbul ignore next - hooks. 
*/ + if ( + (code !== 72 && code - 32 !== 72) || + !previousHttp(self.previous) || + previous(self.events) + ) { + return nok(code) + } + + effects.enter('literalAutolink') + effects.enter('literalAutolinkHttp') + effects.consume(code) + return t1 + } + + function t1(code) { + // `t` + if (code === 84 || code - 32 === 84) { + effects.consume(code) + return t2 + } + + return nok(code) + } + + function t2(code) { + // `t` + if (code === 84 || code - 32 === 84) { + effects.consume(code) + return p + } + + return nok(code) + } + + function p(code) { + // `p` + if (code === 80 || code - 32 === 80) { + effects.consume(code) + return s + } + + return nok(code) + } + + function s(code) { + // `s` + if (code === 83 || code - 32 === 83) { + effects.consume(code) + return colon + } + + return colon(code) + } + + function colon(code) { + // `:` + if (code === 58) { + effects.consume(code) + return slash1 + } + + return nok(code) + } + + function slash1(code) { + // `/` + if (code === 47) { + effects.consume(code) + return slash2 + } + + return nok(code) + } + + function slash2(code) { + // `/` + if (code === 47) { + effects.consume(code) + return after + } + + return nok(code) + } + + function after(code) { + return asciiControl(code) || + unicodeWhitespace(code) || + unicodePunctuation(code) + ? nok(code) + : effects.attempt(domain, effects.attempt(path, done), nok)(code) + } + + function done(code) { + effects.exit('literalAutolinkHttp') + effects.exit('literalAutolink') + return ok(code) + } +} + +function tokenizeWww(effects, ok, nok) { + return start + + function start(code) { + // Assume a `w`. + effects.consume(code) + return w2 + } + + function w2(code) { + // `w` + if (code === 87 || code - 32 === 87) { + effects.consume(code) + return w3 + } + + return nok(code) + } + + function w3(code) { + // `w` + if (code === 87 || code - 32 === 87) { + effects.consume(code) + return dot + } + + return nok(code) + } + + function dot(code) { + // `.` + if (code === 46) { + effects.consume(code) + return after + } + + return nok(code) + } + + function after(code) { + return code === null || markdownLineEnding(code) ? nok(code) : ok(code) + } +} + +function tokenizeDomain(effects, ok, nok) { + var hasUnderscoreInLastSegment + var hasUnderscoreInLastLastSegment + + return domain + + function domain(code) { + // `&` + if (code === 38) { + return effects.check( + namedCharacterReference, + done, + punctuationContinuation + )(code) + } + + if (code === 46 /* `.` */ || code === 95 /* `_` */) { + return effects.check(punctuation, done, punctuationContinuation)(code) + } + + // GH documents that only alphanumerics (other than `-`, `.`, and `_`) can + // occur, which sounds like ASCII only, but they also support `www.點看.com`, + // so that’s Unicode. + // Instead of some new production for Unicode alphanumerics, markdown + // already has that for Unicode punctuation and whitespace, so use those. 
+ if ( + asciiControl(code) || + unicodeWhitespace(code) || + (code !== 45 /* `-` */ && unicodePunctuation(code)) + ) { + return done(code) + } + + effects.consume(code) + return domain + } + + function punctuationContinuation(code) { + // `.` + if (code === 46) { + hasUnderscoreInLastLastSegment = hasUnderscoreInLastSegment + hasUnderscoreInLastSegment = undefined + effects.consume(code) + return domain + } + + // `_` + if (code === 95) hasUnderscoreInLastSegment = true + + effects.consume(code) + return domain + } + + function done(code) { + if (!hasUnderscoreInLastLastSegment && !hasUnderscoreInLastSegment) { + return ok(code) + } + + return nok(code) + } +} + +function tokenizePath(effects, ok) { + var balance = 0 + + return inPath + + function inPath(code) { + // `&` + if (code === 38) { + return effects.check( + namedCharacterReference, + ok, + continuedPunctuation + )(code) + } + + // `(` + if (code === 40) { + balance++ + } + + // `)` + if (code === 41) { + return effects.check( + punctuation, + parenAtPathEnd, + continuedPunctuation + )(code) + } + + if (pathEnd(code)) { + return ok(code) + } + + if (trailingPunctuation(code)) { + return effects.check(punctuation, ok, continuedPunctuation)(code) + } + + effects.consume(code) + return inPath + } + + function continuedPunctuation(code) { + effects.consume(code) + return inPath + } + + function parenAtPathEnd(code) { + balance-- + return balance < 0 ? ok(code) : continuedPunctuation(code) + } +} + +function tokenizeNamedCharacterReference(effects, ok, nok) { + return start + + function start(code) { + // Assume an ampersand. + effects.consume(code) + return inside + } + + function inside(code) { + if (asciiAlpha(code)) { + effects.consume(code) + return inside + } + + // `;` + if (code === 59) { + effects.consume(code) + return after + } + + return nok(code) + } + + function after(code) { + // If the named character reference is followed by the end of the path, it’s + // not continued punctuation. + return pathEnd(code) ? ok(code) : nok(code) + } +} + +function tokenizePunctuation(effects, ok, nok) { + return start + + function start(code) { + // Always a valid trailing punctuation marker. + effects.consume(code) + return after + } + + function after(code) { + // Check the next. + if (trailingPunctuation(code)) { + effects.consume(code) + return after + } + + // If the punctuation marker is followed by the end of the path, it’s not + // continued punctuation. + return pathEnd(code) ? ok(code) : nok(code) + } +} + +function trailingPunctuation(code) { + return ( + // `!` + code === 33 || + // `"` + code === 34 || + // `'` + code === 39 || + // `)` + code === 41 || + // `*` + code === 42 || + // `,` + code === 44 || + // `.` + code === 46 || + // `:` + code === 58 || + // `;` + code === 59 || + // `<` + code === 60 || + // `?` + code === 63 || + // `_`. + code === 95 || + // `~` + code === 126 + ) +} + +function pathEnd(code) { + return ( + // EOF. + code === null || + // CR, LF, CRLF, HT, VS. + code < 0 || + // Space. 
+ code === 32 || + // `<` + code === 60 + ) +} + +function gfmAtext(code) { + return ( + code === 43 /* `+` */ || + code === 45 /* `-` */ || + code === 46 /* `.` */ || + code === 95 /* `_` */ || + asciiAlphanumeric(code) + ) +} + +function previousWww(code) { + return ( + code === null || + code < 0 || + code === 32 /* ` ` */ || + code === 40 /* `(` */ || + code === 42 /* `*` */ || + code === 95 /* `_` */ || + code === 126 /* `~` */ + ) +} + +function previousHttp(code) { + return code === null || !asciiAlpha(code) +} + +function previousEmail(code) { + return code !== 47 /* `/` */ && previousHttp(code) +} + +function previous(events) { + var index = events.length + + while (index--) { + if ( + (events[index][1].type === 'labelLink' || + events[index][1].type === 'labelImage') && + !events[index][1]._balanced + ) { + return true + } + } +} diff --git a/node_modules/micromark-extension-gfm-strikethrough/html.js b/node_modules/micromark-extension-gfm-strikethrough/html.js new file mode 100644 index 00000000..7f55b284 --- /dev/null +++ b/node_modules/micromark-extension-gfm-strikethrough/html.js @@ -0,0 +1,10 @@ +exports.enter = {strikethrough: onenterstrikethrough} +exports.exit = {strikethrough: onexitstrikethrough} + +function onenterstrikethrough() { + this.tag('') +} + +function onexitstrikethrough() { + this.tag('') +} diff --git a/node_modules/micromark-extension-gfm-strikethrough/index.js b/node_modules/micromark-extension-gfm-strikethrough/index.js new file mode 100644 index 00000000..be7b7ccc --- /dev/null +++ b/node_modules/micromark-extension-gfm-strikethrough/index.js @@ -0,0 +1,160 @@ +module.exports = create + +var classifyCharacter = require('micromark/dist/util/classify-character') +var chunkedSplice = require('micromark/dist/util/chunked-splice') +var resolveAll = require('micromark/dist/util/resolve-all') +var shallow = require('micromark/dist/util/shallow') + +function create(options) { + var settings = options || {} + var single = settings.singleTilde + var tokenizer = { + tokenize: tokenizeStrikethrough, + resolveAll: resolveAllStrikethrough + } + + if (single === null || single === undefined) { + single = true + } + + return {text: {126: tokenizer}, insideSpan: {null: tokenizer}} + + // Take events and resolve strikethrough. + function resolveAllStrikethrough(events, context) { + var index = -1 + var strikethrough + var text + var open + var nextEvents + + // Walk through all events. + while (++index < events.length) { + // Find a token that can close. + if ( + events[index][0] === 'enter' && + events[index][1].type === 'strikethroughSequenceTemporary' && + events[index][1]._close + ) { + open = index + + // Now walk back to find an opener. + while (open--) { + // Find a token that can open the closer. + if ( + events[open][0] === 'exit' && + events[open][1].type === 'strikethroughSequenceTemporary' && + events[open][1]._open && + // If the sizes are the same: + events[index][1].end.offset - events[index][1].start.offset === + events[open][1].end.offset - events[open][1].start.offset + ) { + events[index][1].type = 'strikethroughSequence' + events[open][1].type = 'strikethroughSequence' + + strikethrough = { + type: 'strikethrough', + start: shallow(events[open][1].start), + end: shallow(events[index][1].end) + } + + text = { + type: 'strikethroughText', + start: shallow(events[open][1].end), + end: shallow(events[index][1].start) + } + + // Opening. 
+ nextEvents = [ + ['enter', strikethrough, context], + ['enter', events[open][1], context], + ['exit', events[open][1], context], + ['enter', text, context] + ] + + // Between. + chunkedSplice( + nextEvents, + nextEvents.length, + 0, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(open + 1, index), + context + ) + ) + + // Closing. + chunkedSplice(nextEvents, nextEvents.length, 0, [ + ['exit', text, context], + ['enter', events[index][1], context], + ['exit', events[index][1], context], + ['exit', strikethrough, context] + ]) + + chunkedSplice(events, open - 1, index - open + 3, nextEvents) + + index = open + nextEvents.length - 2 + break + } + } + } + } + + return removeRemainingSequences(events) + } + + function removeRemainingSequences(events) { + var index = -1 + var length = events.length + + while (++index < length) { + if (events[index][1].type === 'strikethroughSequenceTemporary') { + events[index][1].type = 'data' + } + } + + return events + } + + function tokenizeStrikethrough(effects, ok, nok) { + var previous = this.previous + var events = this.events + var size = 0 + + return start + + function start(code) { + if ( + code !== 126 || + (previous === 126 && + events[events.length - 1][1].type !== 'characterEscape') + ) { + return nok(code) + } + + effects.enter('strikethroughSequenceTemporary') + return more(code) + } + + function more(code) { + var before = classifyCharacter(previous) + var token + var after + + if (code === 126) { + // If this is the third marker, exit. + if (size > 1) return nok(code) + effects.consume(code) + size++ + return more + } + + if (size < 2 && !single) return nok(code) + token = effects.exit('strikethroughSequenceTemporary') + after = classifyCharacter(code) + token._open = !after || (after === 2 && before) + token._close = !before || (before === 2 && after) + return ok(code) + } + } +} diff --git a/node_modules/micromark-extension-gfm-strikethrough/license b/node_modules/micromark-extension-gfm-strikethrough/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/micromark-extension-gfm-strikethrough/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
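As a rough orientation, here is a minimal sketch of wiring the strikethrough extension above into micromark ~2.11; the input string and the `singleTilde: false` choice are illustrative:

```js
// Sketch only, assuming micromark ~2.11 and the extension code above.
// The default export is a factory taking {singleTilde}; the html module is a
// plain handler object (enter/exit), so it is passed through without a call.
var micromark = require('micromark')
var strikethrough = require('micromark-extension-gfm-strikethrough')
var strikethroughHtml = require('micromark-extension-gfm-strikethrough/html')

console.log(
  micromark('~~gone~~ and ~kept~', {
    extensions: [strikethrough({singleTilde: false})],
    htmlExtensions: [strikethroughHtml]
  })
)
// With singleTilde: false, only the two-tilde run compiles to a <del> element;
// the single-tilde run is left as plain text.
```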
diff --git a/node_modules/micromark-extension-gfm-strikethrough/package.json b/node_modules/micromark-extension-gfm-strikethrough/package.json new file mode 100644 index 00000000..a3a86786 --- /dev/null +++ b/node_modules/micromark-extension-gfm-strikethrough/package.json @@ -0,0 +1,81 @@ +{ + "name": "micromark-extension-gfm-strikethrough", + "version": "0.6.5", + "description": "micromark extension to support GFM strikethrough", + "license": "MIT", + "keywords": [ + "micromark", + "micromark-extension", + "strikethrough", + "strike", + "through", + "del", + "delete", + "deletion", + "gfm", + "markdown", + "unified" + ], + "repository": "micromark/micromark-extension-gfm-strikethrough", + "bugs": "https://github.com/micromark/micromark-extension-gfm-strikethrough/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "types": "types/index.d.ts", + "files": [ + "types/*.d.ts", + "lib/", + "index.js", + "html.js" + ], + "dependencies": { + "micromark": "~2.11.0" + }, + "devDependencies": { + "dtslint": "^4.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.38.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test-types": "dtslint types", + "test": "npm run format && npm run test-coverage && npm run test-types" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "ignores": [ + "types" + ] + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-gfm-strikethrough/readme.md b/node_modules/micromark-extension-gfm-strikethrough/readme.md new file mode 100644 index 00000000..a1271f19 --- /dev/null +++ b/node_modules/micromark-extension-gfm-strikethrough/readme.md @@ -0,0 +1,135 @@ +# micromark-extension-gfm-strikethrough + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support GitHub flavored markdown +[strikethrough][] (~~like this~~). +This syntax extension matches either the GFM spec (only two tildes work) or +github.com (both one or two tildes, when they match, work). + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +You probably shouldn’t use this package directly, but instead use +[`mdast-util-gfm-strikethrough`][mdast-util-gfm-strikethrough] with +**[mdast][]**. + +## Install + +[npm][]: + +```sh +npm install micromark-extension-gfm-strikethrough +``` + +## API + +### `html` + +### `syntax(options?)` + +> Note: `syntax` is the default export of this module, `html` is available at +> `micromark-extension-gfm-strikethrough/html`. + +Support strikethrough (~~like this~~). 
+The export of `syntax` is a function that can be called with options and returns +an extension for the micromark parser (to tokenize strikethrough; can be passed +in `extensions`). +The export of `html` is an extension for the default HTML compiler (to compile +as `` elements; can be passed in `htmlExtensions`). + +##### `options` + +###### `options.singleTilde` + +Whether to support strikethrough with a single tilde (`boolean`, default: +`true`). +Single tildes work on github.com, but are technically prohibited by the GFM +spec. + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`syntax-tree/mdast-util-gfm-strikethrough`][mdast-util-gfm-strikethrough] + — mdast utility to support strikethrough +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/micromark/micromark-extension-gfm-strikethrough/workflows/main/badge.svg + +[build]: https://github.com/micromark/micromark-extension-gfm-strikethrough/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-gfm-strikethrough.svg + +[coverage]: https://codecov.io/github/micromark/micromark-extension-gfm-strikethrough + +[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-gfm-strikethrough.svg + +[downloads]: https://www.npmjs.com/package/micromark-extension-gfm-strikethrough + +[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-gfm-strikethrough.svg + +[size]: https://bundlephobia.com/result?p=micromark-extension-gfm-strikethrough + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/micromark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[micromark]: https://github.com/micromark/micromark + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[remark]: https://github.com/remarkjs/remark + +[mdast]: https://github.com/syntax-tree/mdast + +[mdast-util-gfm-strikethrough]: https://github.com/syntax-tree/mdast-util-gfm-strikethrough + +[strikethrough]: https://github.github.com/gfm/#strikethrough-extension- diff --git a/node_modules/micromark-extension-gfm-strikethrough/types/html.d.ts b/node_modules/micromark-extension-gfm-strikethrough/types/html.d.ts new file mode 100644 index 00000000..71378153 --- /dev/null 
+++ b/node_modules/micromark-extension-gfm-strikethrough/types/html.d.ts @@ -0,0 +1,8 @@ +import {HtmlExtension} from 'micromark/dist/shared-types' + +/** + * The export of `html` is an extension for the default HTML compiler (to + * compile as `` elements; can be passed in `htmlExtensions`). + */ +declare const html: HtmlExtension +export = html diff --git a/node_modules/micromark-extension-gfm-strikethrough/types/index.d.ts b/node_modules/micromark-extension-gfm-strikethrough/types/index.d.ts new file mode 100644 index 00000000..75e6b243 --- /dev/null +++ b/node_modules/micromark-extension-gfm-strikethrough/types/index.d.ts @@ -0,0 +1,26 @@ +// TypeScript Version: 4.0 + +import {SyntaxExtension} from 'micromark/dist/shared-types' + +/** + * Support strikethrough (~~like this~~). The export of `syntax` is a function + * that can be called with options and returns an extension for the micromark + * parser (to tokenize strikethrough; can be passed in `extensions`). + */ +declare function syntax( + options?: syntax.GfmStrikethroughOptions +): SyntaxExtension + +declare namespace syntax { + interface GfmStrikethroughOptions { + /** + * Whether to support strikethrough with a single tilde. Single tildes work + * on github.com, but are technically prohibited by the GFM spec. + * + * @default true + */ + singleTilde?: boolean + } +} + +export = syntax diff --git a/node_modules/micromark-extension-gfm-table/html.js b/node_modules/micromark-extension-gfm-table/html.js new file mode 100644 index 00000000..31d0852b --- /dev/null +++ b/node_modules/micromark-extension-gfm-table/html.js @@ -0,0 +1,138 @@ +var alignment = { + null: '', + left: ' align="left"', + right: ' align="right"', + center: ' align="center"' +} + +exports.enter = { + table: enterTable, + tableBody: enterBody, + tableData: enterTableData, + tableHead: enterHead, + tableHeader: enterTableHeader, + tableRow: enterRow +} +exports.exit = { + codeTextData: exitCodeTextData, + table: exitTable, + tableBody: exitBody, + tableData: exitTableData, + tableHead: exitHead, + tableHeader: exitTableHeader, + tableRow: exitRow +} + +function enterTable(token) { + this.lineEndingIfNeeded() + this.tag('') + this.setData('tableAlign', token._align) +} + +function exitTable() { + this.setData('tableAlign') + // If there was no table body, make sure the slurping from the delimiter row + // is cleared. + this.setData('slurpAllLineEndings') + this.lineEndingIfNeeded() + this.tag('
') +} + +function enterHead() { + this.lineEndingIfNeeded() + this.tag('
` +elements; can be passed in `htmlExtensions`). + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`syntax-tree/mdast-util-gfm-table`](https://github.com/syntax-tree/mdast-util-gfm-table) + — mdast utility to support tables +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/micromark/micromark-extension-gfm-table/workflows/main/badge.svg + +[build]: https://github.com/micromark/micromark-extension-gfm-table/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-gfm-table.svg + +[coverage]: https://codecov.io/github/micromark/micromark-extension-gfm-table + +[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-gfm-table.svg + +[downloads]: https://www.npmjs.com/package/micromark-extension-gfm-table + +[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-gfm-table.svg + +[size]: https://bundlephobia.com/result?p=micromark-extension-gfm-table + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/micromark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[micromark]: https://github.com/micromark/micromark + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[remark]: https://github.com/remarkjs/remark + +[mdast-util-gfm-table]: https://github.com/syntax-tree/mdast-util-gfm-table + +[tables]: https://github.github.com/gfm/#tables-extension- diff --git a/node_modules/micromark-extension-gfm-table/syntax.js b/node_modules/micromark-extension-gfm-table/syntax.js new file mode 100644 index 00000000..fd3678aa --- /dev/null +++ b/node_modules/micromark-extension-gfm-table/syntax.js @@ -0,0 +1,576 @@ +exports.flow = { + null: {tokenize: tokenizeTable, resolve: resolveTable, interruptible: true} +} + +var createSpace = require('micromark/dist/tokenize/factory-space') + +var setextUnderlineMini = {tokenize: tokenizeSetextUnderlineMini, partial: true} +var nextPrefixedOrBlank = {tokenize: tokenizeNextPrefixedOrBlank, partial: true} + +function resolveTable(events, context) { + var length = events.length + var index = -1 + var token + var inHead + var inDelimiterRow + var inRow + var cell + var content + var text + var contentStart + var contentEnd + var cellStart + + 
while (++index < length) { + token = events[index][1] + + if (inRow) { + if (token.type === 'temporaryTableCellContent') { + contentStart = contentStart || index + contentEnd = index + } + + if ( + // Combine separate content parts into one. + (token.type === 'tableCellDivider' || token.type === 'tableRow') && + contentEnd + ) { + content = { + type: 'tableContent', + start: events[contentStart][1].start, + end: events[contentEnd][1].end + } + text = { + type: 'chunkText', + start: content.start, + end: content.end, + contentType: 'text' + } + + events.splice( + contentStart, + contentEnd - contentStart + 1, + ['enter', content, context], + ['enter', text, context], + ['exit', text, context], + ['exit', content, context] + ) + index -= contentEnd - contentStart - 3 + length = events.length + contentStart = undefined + contentEnd = undefined + } + } + + if ( + events[index][0] === 'exit' && + cellStart && + cellStart + 1 < index && + (token.type === 'tableCellDivider' || + (token.type === 'tableRow' && + (cellStart + 3 < index || + events[cellStart][1].type !== 'whitespace'))) + ) { + cell = { + type: inDelimiterRow + ? 'tableDelimiter' + : inHead + ? 'tableHeader' + : 'tableData', + start: events[cellStart][1].start, + end: events[index][1].end + } + events.splice(index + (token.type === 'tableCellDivider' ? 1 : 0), 0, [ + 'exit', + cell, + context + ]) + events.splice(cellStart, 0, ['enter', cell, context]) + index += 2 + length = events.length + cellStart = index + 1 + } + + if (token.type === 'tableRow') { + inRow = events[index][0] === 'enter' + + if (inRow) { + cellStart = index + 1 + } + } + + if (token.type === 'tableDelimiterRow') { + inDelimiterRow = events[index][0] === 'enter' + + if (inDelimiterRow) { + cellStart = index + 1 + } + } + + if (token.type === 'tableHead') { + inHead = events[index][0] === 'enter' + } + } + + return events +} + +function tokenizeTable(effects, ok, nok) { + var align = [] + var tableHeaderCount = 0 + var seenDelimiter + var hasDash + + return start + + function start(code) { + /* istanbul ignore if - used to be passed in beta micromark versions. */ + if (code === null || code === -5 || code === -4 || code === -3) { + return nok(code) + } + + effects.enter('table')._align = align + effects.enter('tableHead') + effects.enter('tableRow') + + // If we start with a pipe, we open a cell marker. + if (code === 124) { + return cellDividerHead(code) + } + + tableHeaderCount++ + effects.enter('temporaryTableCellContent') + // Can’t be space or eols at the start of a construct, so we’re in a cell. + return inCellContentHead(code) + } + + function cellDividerHead(code) { + // Always a pipe. + effects.enter('tableCellDivider') + effects.consume(code) + effects.exit('tableCellDivider') + seenDelimiter = true + return cellBreakHead + } + + function cellBreakHead(code) { + // EOF, CR, LF, CRLF. + if (code === null || code === -5 || code === -4 || code === -3) { + return atRowEndHead(code) + } + + // HT, VS, SP. + if (code === -2 || code === -1 || code === 32) { + effects.enter('whitespace') + effects.consume(code) + return inWhitespaceHead + } + + if (seenDelimiter) { + seenDelimiter = undefined + tableHeaderCount++ + } + + // `|` + if (code === 124) { + return cellDividerHead(code) + } + + // Anything else is cell content. + effects.enter('temporaryTableCellContent') + return inCellContentHead(code) + } + + function inWhitespaceHead(code) { + // HT, VS, SP. 
+ if (code === -2 || code === -1 || code === 32) { + effects.consume(code) + return inWhitespaceHead + } + + effects.exit('whitespace') + return cellBreakHead(code) + } + + function inCellContentHead(code) { + // EOF, whitespace, pipe + if (code === null || code < 0 || code === 32 || code === 124) { + effects.exit('temporaryTableCellContent') + return cellBreakHead(code) + } + + effects.consume(code) + // `\` + return code === 92 ? inCellContentEscapeHead : inCellContentHead + } + + function inCellContentEscapeHead(code) { + // `\` or `|` + if (code === 92 || code === 124) { + effects.consume(code) + return inCellContentHead + } + + // Anything else. + return inCellContentHead(code) + } + + function atRowEndHead(code) { + if (code === null) { + return nok(code) + } + + effects.exit('tableRow') + effects.exit('tableHead') + + // Always a line ending. + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + + // If a setext heading, exit. + return effects.check( + setextUnderlineMini, + nok, + // Support an indent before the delimiter row. + createSpace(effects, rowStartDelimiter, 'linePrefix', 4) + ) + } + + function rowStartDelimiter(code) { + // If there’s another space, or we’re at the EOL/EOF, exit. + if (code === null || code < 0 || code === 32) { + return nok(code) + } + + effects.enter('tableDelimiterRow') + return atDelimiterRowBreak(code) + } + + function atDelimiterRowBreak(code) { + // EOF, CR, LF, CRLF. + if (code === null || code === -5 || code === -4 || code === -3) { + return rowEndDelimiter(code) + } + + // HT, VS, SP. + if (code === -2 || code === -1 || code === 32) { + effects.enter('whitespace') + effects.consume(code) + return inWhitespaceDelimiter + } + + // `-` + if (code === 45) { + effects.enter('tableDelimiterFiller') + effects.consume(code) + hasDash = true + align.push(null) + return inFillerDelimiter + } + + // `:` + if (code === 58) { + effects.enter('tableDelimiterAlignment') + effects.consume(code) + effects.exit('tableDelimiterAlignment') + align.push('left') + return afterLeftAlignment + } + + // If we start with a pipe, we open a cell marker. + if (code === 124) { + effects.enter('tableCellDivider') + effects.consume(code) + effects.exit('tableCellDivider') + return atDelimiterRowBreak + } + + return nok(code) + } + + function inWhitespaceDelimiter(code) { + // HT, VS, SP. + if (code === -2 || code === -1 || code === 32) { + effects.consume(code) + return inWhitespaceDelimiter + } + + effects.exit('whitespace') + return atDelimiterRowBreak(code) + } + + function inFillerDelimiter(code) { + // `-` + if (code === 45) { + effects.consume(code) + return inFillerDelimiter + } + + effects.exit('tableDelimiterFiller') + + // `:` + if (code === 58) { + effects.enter('tableDelimiterAlignment') + effects.consume(code) + effects.exit('tableDelimiterAlignment') + + align[align.length - 1] = + align[align.length - 1] === 'left' ? 'center' : 'right' + + return afterRightAlignment + } + + return atDelimiterRowBreak(code) + } + + function afterLeftAlignment(code) { + // `-` + if (code === 45) { + effects.enter('tableDelimiterFiller') + effects.consume(code) + hasDash = true + return inFillerDelimiter + } + + // Anything else is not ok. + return nok(code) + } + + function afterRightAlignment(code) { + // EOF, CR, LF, CRLF. + if (code === null || code === -5 || code === -4 || code === -3) { + return rowEndDelimiter(code) + } + + // HT, VS, SP. 
+ if (code === -2 || code === -1 || code === 32) { + effects.enter('whitespace') + effects.consume(code) + return inWhitespaceDelimiter + } + + // `|` + if (code === 124) { + effects.enter('tableCellDivider') + effects.consume(code) + effects.exit('tableCellDivider') + return atDelimiterRowBreak + } + + return nok(code) + } + + function rowEndDelimiter(code) { + effects.exit('tableDelimiterRow') + + // Exit if there was no dash at all, or if the header cell count is not the + // delimiter cell count. + if (!hasDash || tableHeaderCount !== align.length) { + return nok(code) + } + + if (code === null) { + return tableClose(code) + } + + return effects.check(nextPrefixedOrBlank, tableClose, tableContinue)(code) + } + + function tableClose(code) { + effects.exit('table') + return ok(code) + } + + function tableContinue(code) { + // Always a line ending. + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + // We checked that it’s not a prefixed or blank line, so we’re certain a + // body is coming, though it may be indented. + return createSpace(effects, bodyStart, 'linePrefix', 4) + } + + function bodyStart(code) { + effects.enter('tableBody') + return rowStartBody(code) + } + + function rowStartBody(code) { + effects.enter('tableRow') + + // If we start with a pipe, we open a cell marker. + if (code === 124) { + return cellDividerBody(code) + } + + effects.enter('temporaryTableCellContent') + // Can’t be space or eols at the start of a construct, so we’re in a cell. + return inCellContentBody(code) + } + + function cellDividerBody(code) { + // Always a pipe. + effects.enter('tableCellDivider') + effects.consume(code) + effects.exit('tableCellDivider') + return cellBreakBody + } + + function cellBreakBody(code) { + // EOF, CR, LF, CRLF. + if (code === null || code === -5 || code === -4 || code === -3) { + return atRowEndBody(code) + } + + // HT, VS, SP. + if (code === -2 || code === -1 || code === 32) { + effects.enter('whitespace') + effects.consume(code) + return inWhitespaceBody + } + + // `|` + if (code === 124) { + return cellDividerBody(code) + } + + // Anything else is cell content. + effects.enter('temporaryTableCellContent') + return inCellContentBody(code) + } + + function inWhitespaceBody(code) { + // HT, VS, SP. + if (code === -2 || code === -1 || code === 32) { + effects.consume(code) + return inWhitespaceBody + } + + effects.exit('whitespace') + return cellBreakBody(code) + } + + function inCellContentBody(code) { + // EOF, whitespace, pipe + if (code === null || code < 0 || code === 32 || code === 124) { + effects.exit('temporaryTableCellContent') + return cellBreakBody(code) + } + + effects.consume(code) + // `\` + return code === 92 ? inCellContentEscapeBody : inCellContentBody + } + + function inCellContentEscapeBody(code) { + // `\` or `|` + if (code === 92 || code === 124) { + effects.consume(code) + return inCellContentBody + } + + // Anything else. + return inCellContentBody(code) + } + + function atRowEndBody(code) { + effects.exit('tableRow') + + if (code === null) { + return tableBodyClose(code) + } + + return effects.check( + nextPrefixedOrBlank, + tableBodyClose, + tableBodyContinue + )(code) + } + + function tableBodyClose(code) { + effects.exit('tableBody') + return tableClose(code) + } + + function tableBodyContinue(code) { + // Always a line ending. + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + // Support an optional prefix, then start a body row. 
+ return createSpace(effects, rowStartBody, 'linePrefix', 4) + } +} + +// Based on micromark, but that won’t work as we’re in a table, and that expects +// content. +// +function tokenizeSetextUnderlineMini(effects, ok, nok) { + return start + + function start(code) { + // `-` + if (code !== 45) { + return nok(code) + } + + effects.enter('setextUnderline') + return sequence(code) + } + + function sequence(code) { + if (code === 45) { + effects.consume(code) + return sequence + } + + return whitespace(code) + } + + function whitespace(code) { + if (code === -2 || code === -1 || code === 32) { + effects.consume(code) + return whitespace + } + + if (code === null || code === -5 || code === -4 || code === -3) { + return ok(code) + } + + return nok(code) + } +} + +function tokenizeNextPrefixedOrBlank(effects, ok, nok) { + var size = 0 + + return start + + function start(code) { + // This is a check, so we don’t care about tokens, but we open a bogus one + // so we’re valid. + effects.enter('check') + // EOL. + effects.consume(code) + return whitespace + } + + function whitespace(code) { + // VS or SP. + if (code === -1 || code === 32) { + effects.consume(code) + size++ + return size === 4 ? ok : whitespace + } + + // EOF or whitespace + if (code === null || code < 0) { + return ok(code) + } + + // Anything else. + return nok(code) + } +} diff --git a/node_modules/micromark-extension-gfm-tagfilter/html.js b/node_modules/micromark-extension-gfm-tagfilter/html.js new file mode 100644 index 00000000..d19d6a36 --- /dev/null +++ b/node_modules/micromark-extension-gfm-tagfilter/html.js @@ -0,0 +1,29 @@ +exports.exit = { + htmlFlowData: exitHtmlFlowData, + htmlTextData: exitHtmlTextData +} + +// An opening or closing tag, followed by a case-insensitive specific tag name, +// followed by HTML whitespace, a greater than, or a slash. +var reFlow = /<(\/?)(iframe|noembed|noframes|plaintext|script|style|title|textarea|xmp)(?=[\t\n\f\r />])/gi +// As HTML (text) parses tags separately (and v. strictly), we don’t need to be +// global. 
+var reText = new RegExp('^' + reFlow.source, 'i')
+
+function exitHtmlFlowData(token) {
+  exitHtmlData.call(this, token, reFlow)
+}
+
+function exitHtmlTextData(token) {
+  exitHtmlData.call(this, token, reText)
+}
+
+function exitHtmlData(token, filter) {
+  var value = this.sliceSerialize(token)
+
+  if (this.options.allowDangerousHtml) {
+    // Escape the opening angle bracket of filtered tags so they are inert.
+    value = value.replace(filter, '&lt;$1$2')
+  }
+
+  this.raw(this.encode(value))
+}
diff --git a/node_modules/micromark-extension-gfm-tagfilter/index.js b/node_modules/micromark-extension-gfm-tagfilter/index.js
new file mode 100644
index 00000000..b80f9135
--- /dev/null
+++ b/node_modules/micromark-extension-gfm-tagfilter/index.js
@@ -0,0 +1 @@
+module.exports = require('./html')
diff --git a/node_modules/micromark-extension-gfm-tagfilter/license b/node_modules/micromark-extension-gfm-tagfilter/license
new file mode 100644
index 00000000..39372356
--- /dev/null
+++ b/node_modules/micromark-extension-gfm-tagfilter/license
@@ -0,0 +1,22 @@
+(The MIT License)
+
+Copyright (c) 2020 Titus Wormer
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/micromark-extension-gfm-tagfilter/package.json b/node_modules/micromark-extension-gfm-tagfilter/package.json
new file mode 100644
index 00000000..868d4d98
--- /dev/null
+++ b/node_modules/micromark-extension-gfm-tagfilter/package.json
@@ -0,0 +1,71 @@
+{
+  "name": "micromark-extension-gfm-tagfilter",
+  "version": "0.3.0",
+  "description": "micromark extension to support GFM tagfilter",
+  "license": "MIT",
+  "keywords": [
+    "micromark",
+    "micromark-extension",
+    "tagfilter",
+    "tag",
+    "filter",
+    "dangerous",
+    "html",
+    "gfm",
+    "markdown",
+    "unified"
+  ],
+  "repository": "micromark/micromark-extension-gfm-tagfilter",
+  "bugs": "https://github.com/micromark/micromark-extension-gfm-tagfilter/issues",
+  "funding": {
+    "type": "opencollective",
+    "url": "https://opencollective.com/unified"
+  },
+  "author": "Titus Wormer (https://wooorm.com)",
+  "contributors": [
+    "Titus Wormer (https://wooorm.com)"
+  ],
+  "files": [
+    "index.js",
+    "html.js"
+  ],
+  "dependencies": {},
+  "devDependencies": {
+    "micromark": "~2.6.0",
+    "nyc": "^15.0.0",
+    "prettier": "^2.0.0",
+    "remark-cli": "^8.0.0",
+    "remark-preset-wooorm": "^7.0.0",
+    "tape": "^5.0.0",
+    "xo": "^0.33.0"
+  },
+  "scripts": {
+    "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-gfm-tagfilter/readme.md b/node_modules/micromark-extension-gfm-tagfilter/readme.md new file mode 100644 index 00000000..5712005c --- /dev/null +++ b/node_modules/micromark-extension-gfm-tagfilter/readme.md @@ -0,0 +1,110 @@ +# micromark-extension-gfm-tagfilter + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support GitHub flavored markdown [tag filter][]. +This syntax extension matches the GFM spec and github.com. +The [tag filter][] is a rather naïve attempt at XSS protection. +It’s much better to use a proper HTML sanitizing algorithm. + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +## Install + +[npm][]: + +```sh +npm install micromark-extension-gfm-tagfilter +``` + +## API + +### `html` + +> Note: `html` is the default export. + +Support a [tag filter][] (protection against script, plaintext, etc). +The export is an extension for the default HTML compiler (to escape certain +tag names; can be passed in `htmlExtensions`). + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. 
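+
+## Example
+
+A minimal usage sketch for the compiler extension described in the API section
+above, assuming the micromark options used elsewhere in this patch
+(`allowDangerousHtml` and `htmlExtensions`); exact output may differ slightly:
+
+```js
+var micromark = require('micromark')
+// `html` is the default export of this package.
+var tagfilterHtml = require('micromark-extension-gfm-tagfilter')
+
+var result = micromark('<em>text</em> and <plaintext>', {
+  // Let raw HTML through, but escape the filtered tag names.
+  allowDangerousHtml: true,
+  htmlExtensions: [tagfilterHtml]
+})
+
+console.log(result)
+// Should yield something like:
+// <p><em>text</em> and &lt;plaintext></p>
+```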
+
+## License
+
+[MIT][license] © [Titus Wormer][author]
+
+<!-- Definitions -->
+
+[build-badge]: https://img.shields.io/travis/micromark/micromark-extension-gfm-tagfilter.svg
+
+[build]: https://travis-ci.org/micromark/micromark-extension-gfm-tagfilter
+
+[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-gfm-tagfilter.svg
+
+[coverage]: https://codecov.io/github/micromark/micromark-extension-gfm-tagfilter
+
+[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-gfm-tagfilter.svg
+
+[downloads]: https://www.npmjs.com/package/micromark-extension-gfm-tagfilter
+
+[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-gfm-tagfilter.svg
+
+[size]: https://bundlephobia.com/result?p=micromark-extension-gfm-tagfilter
+
+[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg
+
+[backers-badge]: https://opencollective.com/unified/backers/badge.svg
+
+[collective]: https://opencollective.com/unified
+
+[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg
+
+[chat]: https://github.com/micromark/unist/discussions
+
+[npm]: https://docs.npmjs.com/cli/install
+
+[license]: license
+
+[author]: https://wooorm.com
+
+[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md
+
+[support]: https://github.com/micromark/.github/blob/HEAD/support.md
+
+[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md
+
+[micromark]: https://github.com/micromark/micromark
+
+[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown
+
+[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown
+
+[remark]: https://github.com/remarkjs/remark
+
+[tag filter]: https://github.github.com/gfm/#disallowed-raw-html-extension-
diff --git a/node_modules/micromark-extension-gfm-task-list-item/html.js b/node_modules/micromark-extension-gfm-task-list-item/html.js
new file mode 100644
index 00000000..d7a9d5cd
--- /dev/null
+++ b/node_modules/micromark-extension-gfm-task-list-item/html.js
@@ -0,0 +1,14 @@
+exports.enter = {taskListCheck: enterCheck}
+exports.exit = {taskListCheck: exitCheck, taskListCheckValueChecked: checked}
+
+function enterCheck() {
+  this.tag('<input ')
+}
+
+function checked() {
+  this.tag('checked="" ')
+}
+
+function exitCheck() {
+  this.tag('disabled="" type="checkbox">')
+}
diff --git a/node_modules/micromark-extension-gfm-task-list-item/index.js b/node_modules/micromark-extension-gfm-task-list-item/index.js
new file mode 100644
index 00000000..b64479d1
--- /dev/null
+++ b/node_modules/micromark-extension-gfm-task-list-item/index.js
@@ -0,0 +1 @@
+module.exports = require('./syntax')
diff --git a/node_modules/micromark-extension-gfm-task-list-item/license b/node_modules/micromark-extension-gfm-task-list-item/license
new file mode 100644
index 00000000..39372356
--- /dev/null
+++ b/node_modules/micromark-extension-gfm-task-list-item/license
@@ -0,0 +1,22 @@
+(The MIT License)
+
+Copyright (c) 2020 Titus Wormer
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/micromark-extension-gfm-task-list-item/package.json b/node_modules/micromark-extension-gfm-task-list-item/package.json new file mode 100644 index 00000000..03aea8a8 --- /dev/null +++ b/node_modules/micromark-extension-gfm-task-list-item/package.json @@ -0,0 +1,75 @@ +{ + "name": "micromark-extension-gfm-task-list-item", + "version": "0.3.3", + "description": "micromark extension to support GFM task list items", + "license": "MIT", + "keywords": [ + "micromark", + "micromark-extension", + "task", + "list", + "item", + "check", + "checkbox", + "todo", + "gfm", + "markdown", + "unified" + ], + "repository": "micromark/micromark-extension-gfm-task-list-item", + "bugs": "https://github.com/micromark/micromark-extension-gfm-task-list-item/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "files": [ + "index.js", + "html.js", + "syntax.js" + ], + "dependencies": { + "micromark": "~2.11.0" + }, + "devDependencies": { + "control-pictures": "^1.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "xo": "^0.36.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test": "npm run format && npm run test-coverage" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-gfm-task-list-item/readme.md b/node_modules/micromark-extension-gfm-task-list-item/readme.md new file mode 100644 index 00000000..5db83d9f --- /dev/null +++ b/node_modules/micromark-extension-gfm-task-list-item/readme.md @@ -0,0 +1,124 @@ +# micromark-extension-gfm-task-list-item + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support GitHub flavored markdown [task list +items][]. +This extension matches the GFM spec for the few things it defines and otherwise +matches github.com. + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +You probably shouldn’t use this package directly, but instead use +[`mdast-util-gfm-task-list-item`][mdast-util-gfm-task-list-item] with +**[mdast][]**. 
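+
+As a quick illustration, a minimal sketch wiring the two exports directly into
+micromark, assuming the `extensions`/`htmlExtensions` options used by the
+other GFM extensions in this patch (output format may vary slightly):
+
+```js
+var micromark = require('micromark')
+var syntax = require('micromark-extension-gfm-task-list-item')
+var html = require('micromark-extension-gfm-task-list-item/html')
+
+var result = micromark('* [x] done\n* [ ] to do', {
+  extensions: [syntax],
+  htmlExtensions: [html]
+})
+
+console.log(result)
+// Should yield something like:
+// <ul>
+// <li><input checked="" disabled="" type="checkbox"> done</li>
+// <li><input disabled="" type="checkbox"> to do</li>
+// </ul>
+```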
+ +## Install + +[npm][]: + +```sh +npm install micromark-extension-gfm-task-list-item +``` + +## API + +### `html` + +### `syntax` + +> Note: `syntax` is the default export of this module, `html` is available at +> `micromark-extension-gfm-task-list-item/html`. + +Support [task list items][]. +The exports are extensions for the micromark parser (to tokenize checks; can be +passed in `extensions`) and the default HTML compiler (to compile as `` +elements; can be passed in `htmlExtensions`). + +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`syntax-tree/mdast-util-gfm-task-list-item`](https://github.com/syntax-tree/mdast-util-gfm-task-list-item) + — mdast utility to support task lists +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + + + +[build-badge]: https://github.com/micromark/micromark-extension-gfm-task-list-item/workflows/main/badge.svg + +[build]: https://github.com/micromark/micromark-extension-gfm-task-list-item/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-gfm-task-list-item.svg + +[coverage]: https://codecov.io/github/micromark/micromark-extension-gfm-task-list-item + +[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-gfm-task-list-item.svg + +[downloads]: https://www.npmjs.com/package/micromark-extension-gfm-task-list-item + +[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-gfm-task-list-item.svg + +[size]: https://bundlephobia.com/result?p=micromark-extension-gfm-task-list-item + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/micromark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[micromark]: https://github.com/micromark/micromark + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[remark]: https://github.com/remarkjs/remark + +[mdast]: https://github.com/syntax-tree/mdast + +[mdast-util-gfm-task-list-item]: https://github.com/syntax-tree/mdast-util-gfm-task-list-item + +[task list items]: https://github.github.com/gfm/#task-list-items-extension- diff --git a/node_modules/micromark-extension-gfm-task-list-item/syntax.js b/node_modules/micromark-extension-gfm-task-list-item/syntax.js new file mode 100644 index 00000000..b473d21c --- /dev/null +++ 
b/node_modules/micromark-extension-gfm-task-list-item/syntax.js @@ -0,0 +1,80 @@ +var markdownLineEndingOrSpace = require('micromark/dist/character/markdown-line-ending-or-space') +var spaceFactory = require('micromark/dist/tokenize/factory-space') +var prefixSize = require('micromark/dist/util/prefix-size') + +var tasklistCheck = {tokenize: tokenizeTasklistCheck} + +exports.text = {91: tasklistCheck} + +function tokenizeTasklistCheck(effects, ok, nok) { + var self = this + + return open + + function open(code) { + if ( + // Exit if not `[`. + code !== 91 || + // Exit if there’s stuff before. + self.previous !== null || + // Exit if not in the first content that is the first child of a list + // item. + !self._gfmTasklistFirstContentOfListItem + ) { + return nok(code) + } + + effects.enter('taskListCheck') + effects.enter('taskListCheckMarker') + effects.consume(code) + effects.exit('taskListCheckMarker') + return inside + } + + function inside(code) { + // Tab or space. + if (code === -2 || code === 32) { + effects.enter('taskListCheckValueUnchecked') + effects.consume(code) + effects.exit('taskListCheckValueUnchecked') + return close + } + + // Upper- and lower `x`. + if (code === 88 || code === 120) { + effects.enter('taskListCheckValueChecked') + effects.consume(code) + effects.exit('taskListCheckValueChecked') + return close + } + + return nok(code) + } + + function close(code) { + // `]` + if (code === 93) { + effects.enter('taskListCheckMarker') + effects.consume(code) + effects.exit('taskListCheckMarker') + effects.exit('taskListCheck') + return effects.check({tokenize: spaceThenNonSpace}, ok, nok) + } + + return nok(code) + } +} + +function spaceThenNonSpace(effects, ok, nok) { + var self = this + + return spaceFactory(effects, after, 'whitespace') + + function after(code) { + return prefixSize(self.events, 'whitespace') && + code !== null && + !markdownLineEndingOrSpace(code) + ? 
ok(code) + : nok(code) + } +} diff --git a/node_modules/micromark-extension-gfm/html.js b/node_modules/micromark-extension-gfm/html.js new file mode 100644 index 00000000..bcab93eb --- /dev/null +++ b/node_modules/micromark-extension-gfm/html.js @@ -0,0 +1,8 @@ +var combine = require('micromark/dist/util/combine-html-extensions') +var autolink = require('micromark-extension-gfm-autolink-literal/html') +var strikethrough = require('micromark-extension-gfm-strikethrough/html') +var table = require('micromark-extension-gfm-table/html') +var tagfilter = require('micromark-extension-gfm-tagfilter/html') +var tasklist = require('micromark-extension-gfm-task-list-item/html') + +module.exports = combine([autolink, strikethrough, table, tagfilter, tasklist]) diff --git a/node_modules/micromark-extension-gfm/index.js b/node_modules/micromark-extension-gfm/index.js new file mode 100644 index 00000000..b64479d1 --- /dev/null +++ b/node_modules/micromark-extension-gfm/index.js @@ -0,0 +1 @@ +module.exports = require('./syntax') diff --git a/node_modules/micromark-extension-gfm/license b/node_modules/micromark-extension-gfm/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/micromark-extension-gfm/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/micromark-extension-gfm/package.json b/node_modules/micromark-extension-gfm/package.json new file mode 100644 index 00000000..cf05777f --- /dev/null +++ b/node_modules/micromark-extension-gfm/package.json @@ -0,0 +1,95 @@ +{ + "name": "micromark-extension-gfm", + "version": "0.3.3", + "description": "micromark extension to support GFM (GitHub Flavored Markdown)", + "license": "MIT", + "keywords": [ + "micromark", + "micromark-extension", + "table", + "strikethrough", + "tasklist", + "autolink", + "tagfilter", + "github", + "gfm", + "markdown", + "unified" + ], + "repository": "micromark/micromark-extension-gfm", + "bugs": "https://github.com/micromark/micromark-extension-gfm/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer (https://wooorm.com)", + "contributors": [ + "Titus Wormer (https://wooorm.com)" + ], + "types": "types/index.d.ts", + "files": [ + "types/*.d.ts", + "index.js", + "html.js", + "syntax.js" + ], + "dependencies": { + "micromark": "~2.11.0", + "micromark-extension-gfm-autolink-literal": "~0.5.0", + "micromark-extension-gfm-strikethrough": "~0.6.5", + "micromark-extension-gfm-table": "~0.4.0", + "micromark-extension-gfm-tagfilter": "~0.3.0", + "micromark-extension-gfm-task-list-item": "~0.3.0" + }, + "devDependencies": { + "dtslint": "^4.0.0", + "hast-util-select": "^4.0.0", + "hast-util-to-text": "^2.0.0", + "node-fetch": "^2.6.1", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "rehype-parse": "^7.0.1", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "unified": "^9.0.0", + "xo": "^0.38.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test-types": "dtslint types", + "test": "npm run format && npm run test-coverage && npm run test-types" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "ignores": [ + "types" + ], + "rules": { + "guard-for-in": "off", + "unicorn/no-array-for-each": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/micromark-extension-gfm/readme.md b/node_modules/micromark-extension-gfm/readme.md new file mode 100644 index 00000000..cdc58cb2 --- /dev/null +++ b/node_modules/micromark-extension-gfm/readme.md @@ -0,0 +1,246 @@ +# micromark-extension-gfm + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**[micromark][]** extension to support GitHub flavored markdown. +This extension matches either the [GFM][] spec or github.com (default). + +This package provides the low-level modules for integrating with the micromark +tokenizer and the micromark HTML compiler. + +You probably shouldn’t use this package directly, but instead use +[`mdast-util-gfm`][mdast-util-gfm] with **[mdast][]** or `remark-gfm` with +**[remark][]**. 
+
+Alternatively, the extensions can be used separately:
+
+* [`micromark/micromark-extension-gfm-autolink-literal`](https://github.com/micromark/micromark-extension-gfm-autolink-literal)
+  — support GFM [autolink literals][]
+* [`micromark/micromark-extension-gfm-strikethrough`](https://github.com/micromark/micromark-extension-gfm-strikethrough)
+  — support GFM [strikethrough][]
+* [`micromark/micromark-extension-gfm-table`](https://github.com/micromark/micromark-extension-gfm-table)
+  — support GFM [tables][]
+* [`micromark/micromark-extension-gfm-tagfilter`](https://github.com/micromark/micromark-extension-gfm-tagfilter)
+  — support GFM [tagfilter][]
+* [`micromark/micromark-extension-gfm-task-list-item`](https://github.com/micromark/micromark-extension-gfm-task-list-item)
+  — support GFM [tasklists][]
+
+## Install
+
+[npm][]:
+
+```sh
+npm install micromark-extension-gfm
+```
+
+## Use
+
+Say we have the following file, `example.md`:
+
+```markdown
+# GFM
+
+## Autolink literals
+
+www.example.com, https://example.com, and contact@example.com.
+
+## Strikethrough
+
+~one~ or ~~two~~ tildes.
+
+## Table
+
+| a | b | c | d |
+| - | :- | -: | :-: |
+
+## Tag filter
+
+<plaintext>
+
+## Tasklist
+
+* [ ] to do
+* [x] done
+```
+
+And our script, `example.js`, looks as follows:
+
+```js
+var fs = require('fs')
+var micromark = require('micromark')
+var gfmSyntax = require('micromark-extension-gfm')
+var gfmHtml = require('micromark-extension-gfm/html')
+
+var doc = fs.readFileSync('example.md')
+
+var result = micromark(doc, {
+  allowDangerousHtml: true,
+  extensions: [gfmSyntax()],
+  htmlExtensions: [gfmHtml]
+})
+
+console.log(result)
+```
+
+Now, running `node example` yields:
+
+```html
+<h1>GFM</h1>
+<h2>Autolink literals</h2>
+<p><a href="http://www.example.com">www.example.com</a>, <a href="https://example.com">https://example.com</a>, and <a href="mailto:contact@example.com">contact@example.com</a>.</p>
+<h2>Strikethrough</h2>
+<p><del>one</del> or <del>two</del> tildes.</p>
+<h2>Table</h2>
+<table>
+<thead>
+<tr>
+<th>a</th>
+<th align="left">b</th>
+<th align="right">c</th>
+<th align="center">d</th>
+</tr>
+</thead>
+</table>
+<h2>Tag filter</h2>
+&lt;plaintext>
+<h2>Tasklist</h2>
+<ul>
+<li><input disabled="" type="checkbox"> to do</li>
+<li><input checked="" disabled="" type="checkbox"> done</li>
+</ul>
+```
+
+## API
+
+### `html`
+
+### `syntax(options?)`
+
+> Note: `syntax` is the default export of this module, `html` is available at
+> `micromark-extension-gfm/html`.
+
+Support [GFM][] or markdown on github.com.
+
+The export of `syntax` is a function that can be called with options and
+returns an extension for the micromark parser (to tokenize GFM; can be passed
+in `extensions`).
+The export of `html` is an extension for the default HTML compiler (can be
+passed in `htmlExtensions`).
+
+##### `options`
+
+###### `options.singleTilde`
+
+Passed as [`singleTilde`][single-tilde] in
+[`micromark-extension-gfm-strikethrough`][mm-strikethrough].
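+
+For example, to follow the GFM spec strictly and leave single tildes alone, a
+sketch reusing the setup from `example.js` above (the option name comes from
+the strikethrough extension):
+
+```js
+var micromark = require('micromark')
+var gfmSyntax = require('micromark-extension-gfm')
+var gfmHtml = require('micromark-extension-gfm/html')
+
+var result = micromark('~one~ or ~~two~~ tildes.', {
+  // With `singleTilde: false`, only double tildes become strikethrough.
+  extensions: [gfmSyntax({singleTilde: false})],
+  htmlExtensions: [gfmHtml]
+})
+
+console.log(result)
+// Should yield something like:
+// <p>~one~ or <del>two</del> tildes.</p>
+```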
+ +## Related + +* [`remarkjs/remark`][remark] + — markdown processor powered by plugins +* [`syntax-tree/mdast-util-gfm`](https://github.com/syntax-tree/mdast-util-gfm) + — mdast utility to support GFM +* [`syntax-tree/mdast-util-from-markdown`][from-markdown] + — mdast parser using `micromark` to create mdast from markdown +* [`syntax-tree/mdast-util-to-markdown`][to-markdown] + — mdast serializer to create markdown from mdast +* [`micromark/micromark`][micromark] + — the smallest commonmark-compliant markdown parser that exists +* [`micromark/micromark-extension-gfm-autolink-literal`](https://github.com/micromark/micromark-extension-gfm-autolink-literal) + — support GFM [autolink literals][] +* [`micromark/micromark-extension-gfm-strikethrough`](https://github.com/micromark/micromark-extension-gfm-strikethrough) + — support GFM [strikethrough][] +* [`micromark/micromark-extension-gfm-table`](https://github.com/micromark/micromark-extension-gfm-table) + — support GFM [tables][] +* [`micromark/micromark-extension-gfm-tagfilter`](https://github.com/micromark/micromark-extension-gfm-tagfilter) + — support GFM [tagfilter][] +* [`micromark/micromark-extension-gfm-task-list-item`](https://github.com/micromark/micromark-extension-gfm-task-list-item) + — support GFM [tasklists][] + +## Contribute + +See [`contributing.md` in `micromark/.github`][contributing] for ways to get +started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + +<!-- Definitions --> + +[build-badge]: https://github.com/micromark/micromark-extension-gfm/workflows/main/badge.svg + +[build]: https://github.com/micromark/micromark-extension-gfm/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark-extension-gfm.svg + +[coverage]: https://codecov.io/github/micromark/micromark-extension-gfm + +[downloads-badge]: https://img.shields.io/npm/dm/micromark-extension-gfm.svg + +[downloads]: https://www.npmjs.com/package/micromark-extension-gfm + +[size-badge]: https://img.shields.io/bundlephobia/minzip/micromark-extension-gfm.svg + +[size]: https://bundlephobia.com/result?p=micromark-extension-gfm + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/micromark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[micromark]: https://github.com/micromark/micromark + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown + +[remark]: https://github.com/remarkjs/remark + +[mdast]: https://github.com/syntax-tree/mdast + +[gfm]: https://github.github.com/gfm/ + +[mdast-util-gfm]: https://github.com/syntax-tree/mdast-util-gfm + +[strikethrough]: https://github.github.com/gfm/#strikethrough-extension- + +[tables]: https://github.github.com/gfm/#tables-extension- + 
+[tasklists]: https://github.github.com/gfm/#task-list-items-extension- + +[autolink literals]: https://github.github.com/gfm/#autolinks-extension- + +[tagfilter]: https://github.github.com/gfm/#disallowed-raw-html-extension- + +[single-tilde]: https://github.com/micromark/micromark-extension-gfm-strikethrough#optionssingletilde + +[mm-strikethrough]: https://github.com/micromark/micromark-extension-gfm-strikethrough diff --git a/node_modules/micromark-extension-gfm/syntax.js b/node_modules/micromark-extension-gfm/syntax.js new file mode 100644 index 00000000..059f41d8 --- /dev/null +++ b/node_modules/micromark-extension-gfm/syntax.js @@ -0,0 +1,11 @@ +var combine = require('micromark/dist/util/combine-extensions') +var autolink = require('micromark-extension-gfm-autolink-literal') +var strikethrough = require('micromark-extension-gfm-strikethrough') +var table = require('micromark-extension-gfm-table') +var tasklist = require('micromark-extension-gfm-task-list-item') + +module.exports = create + +function create(options) { + return combine([autolink, strikethrough(options), table, tasklist]) +} diff --git a/node_modules/micromark-extension-gfm/types/html.d.ts b/node_modules/micromark-extension-gfm/types/html.d.ts new file mode 100644 index 00000000..90784732 --- /dev/null +++ b/node_modules/micromark-extension-gfm/types/html.d.ts @@ -0,0 +1,8 @@ +import {HtmlExtension} from 'micromark/dist/shared-types' + +/** + * The export of html is an extension for the default HTML compiler (can be + * passed in `htmlExtensions`). + */ +declare const html: HtmlExtension +export = html diff --git a/node_modules/micromark-extension-gfm/types/index.d.ts b/node_modules/micromark-extension-gfm/types/index.d.ts new file mode 100644 index 00000000..dc8ae246 --- /dev/null +++ b/node_modules/micromark-extension-gfm/types/index.d.ts @@ -0,0 +1,19 @@ +// TypeScript Version: 4.0 + +import {SyntaxExtension} from 'micromark/dist/shared-types' +import {GfmStrikethroughOptions} from 'micromark-extension-gfm-strikethrough' + +/** + * Support GFM or markdown on github.com. + * + * The export of `syntax` is a function that can be called with options and + * returns extension for the micromark parser (to tokenize GFM; can be passed + * in `extensions`). 
+ */ +declare function syntax(options?: syntax.GfmOptions): SyntaxExtension + +declare namespace syntax { + type GfmOptions = GfmStrikethroughOptions +} + +export = syntax diff --git a/node_modules/micromark/buffer.d.ts b/node_modules/micromark/buffer.d.ts new file mode 100644 index 00000000..da97e2c4 --- /dev/null +++ b/node_modules/micromark/buffer.d.ts @@ -0,0 +1,5 @@ +// Minimum TypeScript Version: 3.0 + +import buffer from './dist' + +export default buffer diff --git a/node_modules/micromark/buffer.js b/node_modules/micromark/buffer.js new file mode 100644 index 00000000..2b74f75a --- /dev/null +++ b/node_modules/micromark/buffer.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('./dist') diff --git a/node_modules/micromark/buffer.mjs b/node_modules/micromark/buffer.mjs new file mode 100644 index 00000000..9b91a071 --- /dev/null +++ b/node_modules/micromark/buffer.mjs @@ -0,0 +1 @@ +export {default} from './dist/index.js' diff --git a/node_modules/micromark/dist/character/ascii-alpha.js b/node_modules/micromark/dist/character/ascii-alpha.js new file mode 100644 index 00000000..4e5b20d2 --- /dev/null +++ b/node_modules/micromark/dist/character/ascii-alpha.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiAlpha = regexCheck(/[A-Za-z]/) + +module.exports = asciiAlpha diff --git a/node_modules/micromark/dist/character/ascii-alphanumeric.js b/node_modules/micromark/dist/character/ascii-alphanumeric.js new file mode 100644 index 00000000..4ab36027 --- /dev/null +++ b/node_modules/micromark/dist/character/ascii-alphanumeric.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/) + +module.exports = asciiAlphanumeric diff --git a/node_modules/micromark/dist/character/ascii-atext.js b/node_modules/micromark/dist/character/ascii-atext.js new file mode 100644 index 00000000..8962f996 --- /dev/null +++ b/node_modules/micromark/dist/character/ascii-atext.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/) + +module.exports = asciiAtext diff --git a/node_modules/micromark/dist/character/ascii-control.js b/node_modules/micromark/dist/character/ascii-control.js new file mode 100644 index 00000000..604ed1f2 --- /dev/null +++ b/node_modules/micromark/dist/character/ascii-control.js @@ -0,0 +1,12 @@ +'use strict' + +// Note: EOF is seen as ASCII control here, because `null < 32 == true`. 
+function asciiControl(code) { + return ( + // Special whitespace codes (which have negative values), C0 and Control + // character DEL + code < 32 || code === 127 + ) +} + +module.exports = asciiControl diff --git a/node_modules/micromark/dist/character/ascii-digit.js b/node_modules/micromark/dist/character/ascii-digit.js new file mode 100644 index 00000000..da614c4e --- /dev/null +++ b/node_modules/micromark/dist/character/ascii-digit.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiDigit = regexCheck(/\d/) + +module.exports = asciiDigit diff --git a/node_modules/micromark/dist/character/ascii-hex-digit.js b/node_modules/micromark/dist/character/ascii-hex-digit.js new file mode 100644 index 00000000..a0e7af43 --- /dev/null +++ b/node_modules/micromark/dist/character/ascii-hex-digit.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiHexDigit = regexCheck(/[\dA-Fa-f]/) + +module.exports = asciiHexDigit diff --git a/node_modules/micromark/dist/character/ascii-punctuation.js b/node_modules/micromark/dist/character/ascii-punctuation.js new file mode 100644 index 00000000..596b45a5 --- /dev/null +++ b/node_modules/micromark/dist/character/ascii-punctuation.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/) + +module.exports = asciiPunctuation diff --git a/node_modules/micromark/dist/character/codes.d.ts b/node_modules/micromark/dist/character/codes.d.ts new file mode 100644 index 00000000..dc1005c6 --- /dev/null +++ b/node_modules/micromark/dist/character/codes.d.ts @@ -0,0 +1,3 @@ +// This module is generated by `script/`. + +export type Code = null | number diff --git a/node_modules/micromark/dist/character/codes.js b/node_modules/micromark/dist/character/codes.js new file mode 100644 index 00000000..01ea00a6 --- /dev/null +++ b/node_modules/micromark/dist/character/codes.js @@ -0,0 +1,257 @@ +'use strict' + +// This module is compiled away! +// +// micromark works based on character codes. +// This module contains constants for the ASCII block and the replacement +// character. +// A couple of them are handled in a special way, such as the line endings +// (CR, LF, and CR+LF, commonly known as end-of-line: EOLs), the tab (horizontal +// tab) and its expansion based on what column it’s at (virtual space), +// and the end-of-file (eof) character. +// As values are preprocessed before handling them, the actual characters LF, +// CR, HT, and NUL (which is present as the replacement character), are +// guaranteed to not exist. +// +// Unicode basic latin block. 
+var codes = { + carriageReturn: -5, + lineFeed: -4, + carriageReturnLineFeed: -3, + horizontalTab: -2, + virtualSpace: -1, + eof: null, + nul: 0, + soh: 1, + stx: 2, + etx: 3, + eot: 4, + enq: 5, + ack: 6, + bel: 7, + bs: 8, + ht: 9, + // `\t` + lf: 10, + // `\n` + vt: 11, + // `\v` + ff: 12, + // `\f` + cr: 13, + // `\r` + so: 14, + si: 15, + dle: 16, + dc1: 17, + dc2: 18, + dc3: 19, + dc4: 20, + nak: 21, + syn: 22, + etb: 23, + can: 24, + em: 25, + sub: 26, + esc: 27, + fs: 28, + gs: 29, + rs: 30, + us: 31, + space: 32, + exclamationMark: 33, + // `!` + quotationMark: 34, + // `"` + numberSign: 35, + // `#` + dollarSign: 36, + // `$` + percentSign: 37, + // `%` + ampersand: 38, + // `&` + apostrophe: 39, + // `'` + leftParenthesis: 40, + // `(` + rightParenthesis: 41, + // `)` + asterisk: 42, + // `*` + plusSign: 43, + // `+` + comma: 44, + // `,` + dash: 45, + // `-` + dot: 46, + // `.` + slash: 47, + // `/` + digit0: 48, + // `0` + digit1: 49, + // `1` + digit2: 50, + // `2` + digit3: 51, + // `3` + digit4: 52, + // `4` + digit5: 53, + // `5` + digit6: 54, + // `6` + digit7: 55, + // `7` + digit8: 56, + // `8` + digit9: 57, + // `9` + colon: 58, + // `:` + semicolon: 59, + // `;` + lessThan: 60, + // `<` + equalsTo: 61, + // `=` + greaterThan: 62, + // `>` + questionMark: 63, + // `?` + atSign: 64, + // `@` + uppercaseA: 65, + // `A` + uppercaseB: 66, + // `B` + uppercaseC: 67, + // `C` + uppercaseD: 68, + // `D` + uppercaseE: 69, + // `E` + uppercaseF: 70, + // `F` + uppercaseG: 71, + // `G` + uppercaseH: 72, + // `H` + uppercaseI: 73, + // `I` + uppercaseJ: 74, + // `J` + uppercaseK: 75, + // `K` + uppercaseL: 76, + // `L` + uppercaseM: 77, + // `M` + uppercaseN: 78, + // `N` + uppercaseO: 79, + // `O` + uppercaseP: 80, + // `P` + uppercaseQ: 81, + // `Q` + uppercaseR: 82, + // `R` + uppercaseS: 83, + // `S` + uppercaseT: 84, + // `T` + uppercaseU: 85, + // `U` + uppercaseV: 86, + // `V` + uppercaseW: 87, + // `W` + uppercaseX: 88, + // `X` + uppercaseY: 89, + // `Y` + uppercaseZ: 90, + // `Z` + leftSquareBracket: 91, + // `[` + backslash: 92, + // `\` + rightSquareBracket: 93, + // `]` + caret: 94, + // `^` + underscore: 95, + // `_` + graveAccent: 96, + // `` ` `` + lowercaseA: 97, + // `a` + lowercaseB: 98, + // `b` + lowercaseC: 99, + // `c` + lowercaseD: 100, + // `d` + lowercaseE: 101, + // `e` + lowercaseF: 102, + // `f` + lowercaseG: 103, + // `g` + lowercaseH: 104, + // `h` + lowercaseI: 105, + // `i` + lowercaseJ: 106, + // `j` + lowercaseK: 107, + // `k` + lowercaseL: 108, + // `l` + lowercaseM: 109, + // `m` + lowercaseN: 110, + // `n` + lowercaseO: 111, + // `o` + lowercaseP: 112, + // `p` + lowercaseQ: 113, + // `q` + lowercaseR: 114, + // `r` + lowercaseS: 115, + // `s` + lowercaseT: 116, + // `t` + lowercaseU: 117, + // `u` + lowercaseV: 118, + // `v` + lowercaseW: 119, + // `w` + lowercaseX: 120, + // `x` + lowercaseY: 121, + // `y` + lowercaseZ: 122, + // `z` + leftCurlyBrace: 123, + // `{` + verticalBar: 124, + // `|` + rightCurlyBrace: 125, + // `}` + tilde: 126, + // `~` + del: 127, + // Unicode Specials block. + byteOrderMarker: 65279, + // Unicode Specials block. 
+ replacementCharacter: 65533 // `�` +} + +module.exports = codes diff --git a/node_modules/micromark/dist/character/markdown-line-ending-or-space.js b/node_modules/micromark/dist/character/markdown-line-ending-or-space.js new file mode 100644 index 00000000..d78d17d1 --- /dev/null +++ b/node_modules/micromark/dist/character/markdown-line-ending-or-space.js @@ -0,0 +1,7 @@ +'use strict' + +function markdownLineEndingOrSpace(code) { + return code < 0 || code === 32 +} + +module.exports = markdownLineEndingOrSpace diff --git a/node_modules/micromark/dist/character/markdown-line-ending.js b/node_modules/micromark/dist/character/markdown-line-ending.js new file mode 100644 index 00000000..5893934c --- /dev/null +++ b/node_modules/micromark/dist/character/markdown-line-ending.js @@ -0,0 +1,7 @@ +'use strict' + +function markdownLineEnding(code) { + return code < -2 +} + +module.exports = markdownLineEnding diff --git a/node_modules/micromark/dist/character/markdown-space.js b/node_modules/micromark/dist/character/markdown-space.js new file mode 100644 index 00000000..e1b907b3 --- /dev/null +++ b/node_modules/micromark/dist/character/markdown-space.js @@ -0,0 +1,7 @@ +'use strict' + +function markdownSpace(code) { + return code === -2 || code === -1 || code === 32 +} + +module.exports = markdownSpace diff --git a/node_modules/micromark/dist/character/unicode-punctuation.js b/node_modules/micromark/dist/character/unicode-punctuation.js new file mode 100644 index 00000000..eea51658 --- /dev/null +++ b/node_modules/micromark/dist/character/unicode-punctuation.js @@ -0,0 +1,10 @@ +'use strict' + +var unicodePunctuationRegex = require('../constant/unicode-punctuation-regex.js') +var regexCheck = require('../util/regex-check.js') + +// In fact adds to the bundle size. + +var unicodePunctuation = regexCheck(unicodePunctuationRegex) + +module.exports = unicodePunctuation diff --git a/node_modules/micromark/dist/character/unicode-whitespace.js b/node_modules/micromark/dist/character/unicode-whitespace.js new file mode 100644 index 00000000..b09537ea --- /dev/null +++ b/node_modules/micromark/dist/character/unicode-whitespace.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var unicodeWhitespace = regexCheck(/\s/) + +module.exports = unicodeWhitespace diff --git a/node_modules/micromark/dist/character/values.d.ts b/node_modules/micromark/dist/character/values.d.ts new file mode 100644 index 00000000..bf82ac1a --- /dev/null +++ b/node_modules/micromark/dist/character/values.d.ts @@ -0,0 +1,102 @@ +// This module is generated by `script/`. + +export type Value = + | '\t' + | '\n' + | '\r' + | ' ' + | '!' + | '"' + | '#' + | '$' + | '%' + | '&' + | "'" + | '(' + | ')' + | '*' + | '+' + | ',' + | '-' + | '.' + | '/' + | '0' + | '1' + | '2' + | '3' + | '4' + | '5' + | '6' + | '7' + | '8' + | '9' + | ':' + | ';' + | '<' + | '=' + | '>' + | '?' 
+ | '@' + | 'A' + | 'B' + | 'C' + | 'D' + | 'E' + | 'F' + | 'G' + | 'H' + | 'I' + | 'J' + | 'K' + | 'L' + | 'M' + | 'N' + | 'O' + | 'P' + | 'Q' + | 'R' + | 'S' + | 'T' + | 'U' + | 'V' + | 'W' + | 'X' + | 'Y' + | 'Z' + | '[' + | '\\' + | ']' + | '^' + | '_' + | '`' + | 'a' + | 'b' + | 'c' + | 'd' + | 'e' + | 'f' + | 'g' + | 'h' + | 'i' + | 'j' + | 'k' + | 'l' + | 'm' + | 'n' + | 'o' + | 'p' + | 'q' + | 'r' + | 's' + | 't' + | 'u' + | 'v' + | 'w' + | 'x' + | 'y' + | 'z' + | '{' + | '|' + | '}' + | '~' + | '�' diff --git a/node_modules/micromark/dist/character/values.js b/node_modules/micromark/dist/character/values.js new file mode 100644 index 00000000..cd1794fd --- /dev/null +++ b/node_modules/micromark/dist/character/values.js @@ -0,0 +1,111 @@ +'use strict' + +// This module is compiled away! +// +// While micromark works based on character codes, this module includes the +// string versions of ’em. +// The C0 block, except for LF, CR, HT, and w/ the replacement character added, +// are available here. +var values = { + ht: '\t', + lf: '\n', + cr: '\r', + space: ' ', + exclamationMark: '!', + quotationMark: '"', + numberSign: '#', + dollarSign: '$', + percentSign: '%', + ampersand: '&', + apostrophe: "'", + leftParenthesis: '(', + rightParenthesis: ')', + asterisk: '*', + plusSign: '+', + comma: ',', + dash: '-', + dot: '.', + slash: '/', + digit0: '0', + digit1: '1', + digit2: '2', + digit3: '3', + digit4: '4', + digit5: '5', + digit6: '6', + digit7: '7', + digit8: '8', + digit9: '9', + colon: ':', + semicolon: ';', + lessThan: '<', + equalsTo: '=', + greaterThan: '>', + questionMark: '?', + atSign: '@', + uppercaseA: 'A', + uppercaseB: 'B', + uppercaseC: 'C', + uppercaseD: 'D', + uppercaseE: 'E', + uppercaseF: 'F', + uppercaseG: 'G', + uppercaseH: 'H', + uppercaseI: 'I', + uppercaseJ: 'J', + uppercaseK: 'K', + uppercaseL: 'L', + uppercaseM: 'M', + uppercaseN: 'N', + uppercaseO: 'O', + uppercaseP: 'P', + uppercaseQ: 'Q', + uppercaseR: 'R', + uppercaseS: 'S', + uppercaseT: 'T', + uppercaseU: 'U', + uppercaseV: 'V', + uppercaseW: 'W', + uppercaseX: 'X', + uppercaseY: 'Y', + uppercaseZ: 'Z', + leftSquareBracket: '[', + backslash: '\\', + rightSquareBracket: ']', + caret: '^', + underscore: '_', + graveAccent: '`', + lowercaseA: 'a', + lowercaseB: 'b', + lowercaseC: 'c', + lowercaseD: 'd', + lowercaseE: 'e', + lowercaseF: 'f', + lowercaseG: 'g', + lowercaseH: 'h', + lowercaseI: 'i', + lowercaseJ: 'j', + lowercaseK: 'k', + lowercaseL: 'l', + lowercaseM: 'm', + lowercaseN: 'n', + lowercaseO: 'o', + lowercaseP: 'p', + lowercaseQ: 'q', + lowercaseR: 'r', + lowercaseS: 's', + lowercaseT: 't', + lowercaseU: 'u', + lowercaseV: 'v', + lowercaseW: 'w', + lowercaseX: 'x', + lowercaseY: 'y', + lowercaseZ: 'z', + leftCurlyBrace: '{', + verticalBar: '|', + rightCurlyBrace: '}', + tilde: '~', + replacementCharacter: '�' +} + +module.exports = values diff --git a/node_modules/micromark/dist/compile/html.js b/node_modules/micromark/dist/compile/html.js new file mode 100644 index 00000000..b6170ef5 --- /dev/null +++ b/node_modules/micromark/dist/compile/html.js @@ -0,0 +1,787 @@ +'use strict' + +var decodeEntity = require('parse-entities/decode-entity.js') +var assign = require('../constant/assign.js') +var hasOwnProperty = require('../constant/has-own-property.js') +var combineHtmlExtensions = require('../util/combine-html-extensions.js') +var chunkedPush = require('../util/chunked-push.js') +var miniflat = require('../util/miniflat.js') +var normalizeIdentifier = require('../util/normalize-identifier.js') 
+var normalizeUri = require('../util/normalize-uri.js') +var safeFromInt = require('../util/safe-from-int.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var decodeEntity__default = /*#__PURE__*/ _interopDefaultLegacy(decodeEntity) + +// While micromark is a lexer/tokenizer, the common case of going from markdown +// dealt with. +// Technically, we can skip `>` and `"` in many cases, but CM includes them. + +var characterReferences = { + '"': 'quot', + '&': 'amp', + '<': 'lt', + '>': 'gt' +} // These two are allowlists of essentially safe protocols for full URLs in +// respectively the `href` (on `<a>`) and `src` (on `<img>`) attributes. +// They are based on what is allowed on GitHub, +// <https://github.com/syntax-tree/hast-util-sanitize/blob/9275b21/lib/github.json#L31> + +var protocolHref = /^(https?|ircs?|mailto|xmpp)$/i +var protocolSrc = /^https?$/i + +function compileHtml(options) { + // Configuration. + // Includes `htmlExtensions` (an array of extensions), `defaultLineEnding` (a + // preferred EOL), `allowDangerousProtocol` (whether to allow potential + // dangerous protocols), and `allowDangerousHtml` (whether to allow potential + // dangerous HTML). + var settings = options || {} // Tags is needed because according to markdown, links and emphasis and + // whatnot can exist in images, however, as HTML doesn’t allow content in + // images, the tags are ignored in the `alt` attribute, but the content + // remains. + + var tags = true // An object to track identifiers to media (URLs and titles) defined with + // definitions. + + var definitions = {} // A lot of the handlers need to capture some of the output data, modify it + // somehow, and then deal with it. + // We do that by tracking a stack of buffers, that can be opened (with + // `buffer`) and closed (with `resume`) to access them. + + var buffers = [[]] // As we can have links in images and the other way around, where the deepest + // ones are closed first, we need to track which one we’re in. + + var mediaStack = [] // Same for tightness, which is specific to lists. + // We need to track if we’re currently in a tight or loose container. 
+ + var tightStack = [] + var defaultHandlers = { + enter: { + blockQuote: onenterblockquote, + codeFenced: onentercodefenced, + codeFencedFenceInfo: buffer, + codeFencedFenceMeta: buffer, + codeIndented: onentercodeindented, + codeText: onentercodetext, + content: onentercontent, + definition: onenterdefinition, + definitionDestinationString: onenterdefinitiondestinationstring, + definitionLabelString: buffer, + definitionTitleString: buffer, + emphasis: onenteremphasis, + htmlFlow: onenterhtmlflow, + htmlText: onenterhtml, + image: onenterimage, + label: buffer, + link: onenterlink, + listItemMarker: onenterlistitemmarker, + listItemValue: onenterlistitemvalue, + listOrdered: onenterlistordered, + listUnordered: onenterlistunordered, + paragraph: onenterparagraph, + reference: buffer, + resource: onenterresource, + resourceDestinationString: onenterresourcedestinationstring, + resourceTitleString: buffer, + setextHeading: onentersetextheading, + strong: onenterstrong + }, + exit: { + atxHeading: onexitatxheading, + atxHeadingSequence: onexitatxheadingsequence, + autolinkEmail: onexitautolinkemail, + autolinkProtocol: onexitautolinkprotocol, + blockQuote: onexitblockquote, + characterEscapeValue: onexitdata, + characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker, + characterReferenceMarkerNumeric: onexitcharacterreferencemarker, + characterReferenceValue: onexitcharacterreferencevalue, + codeFenced: onexitflowcode, + codeFencedFence: onexitcodefencedfence, + codeFencedFenceInfo: onexitcodefencedfenceinfo, + codeFencedFenceMeta: resume, + codeFlowValue: onexitcodeflowvalue, + codeIndented: onexitflowcode, + codeText: onexitcodetext, + codeTextData: onexitdata, + data: onexitdata, + definition: onexitdefinition, + definitionDestinationString: onexitdefinitiondestinationstring, + definitionLabelString: onexitdefinitionlabelstring, + definitionTitleString: onexitdefinitiontitlestring, + emphasis: onexitemphasis, + hardBreakEscape: onexithardbreak, + hardBreakTrailing: onexithardbreak, + htmlFlow: onexithtml, + htmlFlowData: onexitdata, + htmlText: onexithtml, + htmlTextData: onexitdata, + image: onexitmedia, + label: onexitlabel, + labelText: onexitlabeltext, + lineEnding: onexitlineending, + link: onexitmedia, + listOrdered: onexitlistordered, + listUnordered: onexitlistunordered, + paragraph: onexitparagraph, + reference: resume, + referenceString: onexitreferencestring, + resource: resume, + resourceDestinationString: onexitresourcedestinationstring, + resourceTitleString: onexitresourcetitlestring, + setextHeading: onexitsetextheading, + setextHeadingLineSequence: onexitsetextheadinglinesequence, + setextHeadingText: onexitsetextheadingtext, + strong: onexitstrong, + thematicBreak: onexitthematicbreak + } + } // Combine the HTML extensions with the default handlers. + // An HTML extension is an object whose fields are either `enter` or `exit` + // (reflecting whether a token is entered or exited). + // The values at such objects are names of tokens mapping to handlers. + // Handlers are called, respectively when a token is opener or closed, with + // that token, and a context as `this`. + + var handlers = combineHtmlExtensions( + [defaultHandlers].concat(miniflat(settings.htmlExtensions)) + ) // Handlers do often need to keep track of some state. + // That state is provided here as a key-value store (an object). + + var data = { + tightStack: tightStack + } // The context for handlers references a couple of useful functions. 
+ // In handlers from extensions, those can be accessed at `this`. + // For the handlers here, they can be accessed directly. + + var context = { + lineEndingIfNeeded: lineEndingIfNeeded, + options: settings, + encode: encode, + raw: raw, + tag: tag, + buffer: buffer, + resume: resume, + setData: setData, + getData: getData + } // Generally, micromark copies line endings (`'\r'`, `'\n'`, `'\r\n'`) in the + // markdown document over to the compiled HTML. + // In some cases, such as `> a`, CommonMark requires that extra line endings + // are added: `<blockquote>\n<p>a</p>\n</blockquote>`. + // This variable hold the default line ending when given (or `undefined`), + // and in the latter case will be updated to the first found line ending if + // there is one. + + var lineEndingStyle = settings.defaultLineEnding // Return the function that handles a slice of events. + + return compile // Deal w/ a slice of events. + // Return either the empty string if there’s nothing of note to return, or the + // result when done. + + function compile(events) { + // As definitions can come after references, we need to figure out the media + // (urls and titles) defined by them before handling the references. + // So, we do sort of what HTML does: put metadata at the start (in head), and + // then put content after (`body`). + var head = [] + var body = [] + var index + var start + var listStack + var handler + var result + index = -1 + start = 0 + listStack = [] + + while (++index < events.length) { + // Figure out the line ending style used in the document. + if ( + !lineEndingStyle && + (events[index][1].type === 'lineEnding' || + events[index][1].type === 'lineEndingBlank') + ) { + lineEndingStyle = events[index][2].sliceSerialize(events[index][1]) + } // Preprocess lists to infer whether the list is loose or not. + + if ( + events[index][1].type === 'listOrdered' || + events[index][1].type === 'listUnordered' + ) { + if (events[index][0] === 'enter') { + listStack.push(index) + } else { + prepareList(events.slice(listStack.pop(), index)) + } + } // Move definitions to the front. + + if (events[index][1].type === 'definition') { + if (events[index][0] === 'enter') { + body = chunkedPush(body, events.slice(start, index)) + start = index + } else { + head = chunkedPush(head, events.slice(start, index + 1)) + start = index + 1 + } + } + } + + head = chunkedPush(head, body) + head = chunkedPush(head, events.slice(start)) + result = head + index = -1 // Handle the start of the document, if defined. + + if (handlers.enter.null) { + handlers.enter.null.call(context) + } // Handle all events. + + while (++index < events.length) { + handler = handlers[result[index][0]] + + if (hasOwnProperty.call(handler, result[index][1].type)) { + handler[result[index][1].type].call( + assign( + { + sliceSerialize: result[index][2].sliceSerialize + }, + context + ), + result[index][1] + ) + } + } // Handle the end of the document, if defined. + + if (handlers.exit.null) { + handlers.exit.null.call(context) + } + + return buffers[0].join('') + } // Figure out whether lists are loose or not. + + function prepareList(slice) { + var length = slice.length - 1 // Skip close. + + var index = 0 // Skip open. 
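  // The walk below infers looseness the CommonMark way: a blank line entered
  // while not directly after an item marker and not inside a nested container
  // marks the whole list as loose.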
+ + var containerBalance = 0 + var loose + var atMarker + var event + + while (++index < length) { + event = slice[index] + + if (event[1]._container) { + atMarker = undefined + + if (event[0] === 'enter') { + containerBalance++ + } else { + containerBalance-- + } + } else if (event[1].type === 'listItemPrefix') { + if (event[0] === 'exit') { + atMarker = true + } + } else if (event[1].type === 'linePrefix'); + else if (event[1].type === 'lineEndingBlank') { + if (event[0] === 'enter' && !containerBalance) { + if (atMarker) { + atMarker = undefined + } else { + loose = true + } + } + } else { + atMarker = undefined + } + } + + slice[0][1]._loose = loose + } // Set data into the key-value store. + + function setData(key, value) { + data[key] = value + } // Get data from the key-value store. + + function getData(key) { + return data[key] + } // Capture some of the output data. + + function buffer() { + buffers.push([]) + } // Stop capturing and access the output data. + + function resume() { + return buffers.pop().join('') + } // Output (parts of) HTML tags. + + function tag(value) { + if (!tags) return + setData('lastWasTag', true) + buffers[buffers.length - 1].push(value) + } // Output raw data. + + function raw(value) { + setData('lastWasTag') + buffers[buffers.length - 1].push(value) + } // Output an extra line ending. + + function lineEnding() { + raw(lineEndingStyle || '\n') + } // Output an extra line ending if the previous value wasn’t EOF/EOL. + + function lineEndingIfNeeded() { + var buffer = buffers[buffers.length - 1] + var slice = buffer[buffer.length - 1] + var previous = slice ? slice.charCodeAt(slice.length - 1) : null + + if (previous === 10 || previous === 13 || previous === null) { + return + } + + lineEnding() + } // Make a value safe for injection in HTML (except w/ `ignoreEncode`). + + function encode(value) { + return getData('ignoreEncode') ? value : value.replace(/["&<>]/g, replace) + + function replace(value) { + return '&' + characterReferences[value] + ';' + } + } // Make a value safe for injection as a URL. + // This does encode unsafe characters with percent-encoding, skipping already + // encoded sequences (`normalizeUri`). + // Further unsafe characters are encoded as character references (`encode`). + // Finally, if the URL includes an unknown protocol (such as a dangerous + // example, `javascript:`), the value is ignored. + + function url(url, protocol) { + var value = encode(normalizeUri(url || '')) + var colon = value.indexOf(':') + var questionMark = value.indexOf('?') + var numberSign = value.indexOf('#') + var slash = value.indexOf('/') + + if ( + settings.allowDangerousProtocol || // If there is no protocol, it’s relative. + colon < 0 || // If the first colon is after a `?`, `#`, or `/`, it’s not a protocol. + (slash > -1 && colon > slash) || + (questionMark > -1 && colon > questionMark) || + (numberSign > -1 && colon > numberSign) || // It is a protocol, it should be allowed. + protocol.test(value.slice(0, colon)) + ) { + return value + } + + return '' + } // + // Handlers. 
+ // + + function onenterlistordered(token) { + tightStack.push(!token._loose) + lineEndingIfNeeded() + tag('<ol') + setData('expectFirstItem', true) + } + + function onenterlistunordered(token) { + tightStack.push(!token._loose) + lineEndingIfNeeded() + tag('<ul') + setData('expectFirstItem', true) + } + + function onenterlistitemvalue(token) { + var value + + if (getData('expectFirstItem')) { + value = parseInt(this.sliceSerialize(token), 10) + + if (value !== 1) { + tag(' start="' + encode(String(value)) + '"') + } + } + } + + function onenterlistitemmarker() { + if (getData('expectFirstItem')) { + tag('>') + } else { + onexitlistitem() + } + + lineEndingIfNeeded() + tag('<li>') + setData('expectFirstItem') // “Hack” to prevent a line ending from showing up if the item is empty. + + setData('lastWasTag') + } + + function onexitlistordered() { + onexitlistitem() + tightStack.pop() + lineEnding() + tag('</ol>') + } + + function onexitlistunordered() { + onexitlistitem() + tightStack.pop() + lineEnding() + tag('</ul>') + } + + function onexitlistitem() { + if (getData('lastWasTag') && !getData('slurpAllLineEndings')) { + lineEndingIfNeeded() + } + + tag('</li>') + setData('slurpAllLineEndings') + } + + function onenterblockquote() { + tightStack.push(false) + lineEndingIfNeeded() + tag('<blockquote>') + } + + function onexitblockquote() { + tightStack.pop() + lineEndingIfNeeded() + tag('</blockquote>') + setData('slurpAllLineEndings') + } + + function onenterparagraph() { + if (!tightStack[tightStack.length - 1]) { + lineEndingIfNeeded() + tag('<p>') + } + + setData('slurpAllLineEndings') + } + + function onexitparagraph() { + if (tightStack[tightStack.length - 1]) { + setData('slurpAllLineEndings', true) + } else { + tag('</p>') + } + } + + function onentercodefenced() { + lineEndingIfNeeded() + tag('<pre><code') + setData('fencesCount', 0) + } + + function onexitcodefencedfenceinfo() { + var value = resume() + tag(' class="language-' + value + '"') + } + + function onexitcodefencedfence() { + if (!getData('fencesCount')) { + tag('>') + setData('fencedCodeInside', true) + setData('slurpOneLineEnding', true) + } + + setData('fencesCount', getData('fencesCount') + 1) + } + + function onentercodeindented() { + lineEndingIfNeeded() + tag('<pre><code>') + } + + function onexitflowcode() { + // Send an extra line feed if we saw data. + if (getData('flowCodeSeenData')) lineEndingIfNeeded() + tag('</code></pre>') + if (getData('fencesCount') < 2) lineEndingIfNeeded() + setData('flowCodeSeenData') + setData('fencesCount') + setData('slurpOneLineEnding') + } + + function onenterimage() { + mediaStack.push({ + image: true + }) + tags = undefined // Disallow tags. + } + + function onenterlink() { + mediaStack.push({}) + } + + function onexitlabeltext(token) { + mediaStack[mediaStack.length - 1].labelId = this.sliceSerialize(token) + } + + function onexitlabel() { + mediaStack[mediaStack.length - 1].label = resume() + } + + function onexitreferencestring(token) { + mediaStack[mediaStack.length - 1].referenceId = this.sliceSerialize(token) + } + + function onenterresource() { + buffer() // We can have line endings in the resource, ignore them. + + mediaStack[mediaStack.length - 1].destination = '' + } + + function onenterresourcedestinationstring() { + buffer() // Ignore encoding the result, as we’ll first percent encode the url and + // encode manually after. 
+ + setData('ignoreEncode', true) + } + + function onexitresourcedestinationstring() { + mediaStack[mediaStack.length - 1].destination = resume() + setData('ignoreEncode') + } + + function onexitresourcetitlestring() { + mediaStack[mediaStack.length - 1].title = resume() + } + + function onexitmedia() { + var index = mediaStack.length - 1 // Skip current. + + var media = mediaStack[index] + var context = + media.destination === undefined + ? definitions[normalizeIdentifier(media.referenceId || media.labelId)] + : media + tags = true + + while (index--) { + if (mediaStack[index].image) { + tags = undefined + break + } + } + + if (media.image) { + tag('<img src="' + url(context.destination, protocolSrc) + '" alt="') + raw(media.label) + tag('"') + } else { + tag('<a href="' + url(context.destination, protocolHref) + '"') + } + + tag(context.title ? ' title="' + context.title + '"' : '') + + if (media.image) { + tag(' />') + } else { + tag('>') + raw(media.label) + tag('</a>') + } + + mediaStack.pop() + } + + function onenterdefinition() { + buffer() + mediaStack.push({}) + } + + function onexitdefinitionlabelstring(token) { + // Discard label, use the source content instead. + resume() + mediaStack[mediaStack.length - 1].labelId = this.sliceSerialize(token) + } + + function onenterdefinitiondestinationstring() { + buffer() + setData('ignoreEncode', true) + } + + function onexitdefinitiondestinationstring() { + mediaStack[mediaStack.length - 1].destination = resume() + setData('ignoreEncode') + } + + function onexitdefinitiontitlestring() { + mediaStack[mediaStack.length - 1].title = resume() + } + + function onexitdefinition() { + var id = normalizeIdentifier(mediaStack[mediaStack.length - 1].labelId) + resume() + + if (!hasOwnProperty.call(definitions, id)) { + definitions[id] = mediaStack[mediaStack.length - 1] + } + + mediaStack.pop() + } + + function onentercontent() { + setData('slurpAllLineEndings', true) + } + + function onexitatxheadingsequence(token) { + // Exit for further sequences. + if (getData('headingRank')) return + setData('headingRank', this.sliceSerialize(token).length) + lineEndingIfNeeded() + tag('<h' + getData('headingRank') + '>') + } + + function onentersetextheading() { + buffer() + setData('slurpAllLineEndings') + } + + function onexitsetextheadingtext() { + setData('slurpAllLineEndings', true) + } + + function onexitatxheading() { + tag('</h' + getData('headingRank') + '>') + setData('headingRank') + } + + function onexitsetextheadinglinesequence(token) { + setData( + 'headingRank', + this.sliceSerialize(token).charCodeAt(0) === 61 ? 
1 : 2 + ) + } + + function onexitsetextheading() { + var value = resume() + lineEndingIfNeeded() + tag('<h' + getData('headingRank') + '>') + raw(value) + tag('</h' + getData('headingRank') + '>') + setData('slurpAllLineEndings') + setData('headingRank') + } + + function onexitdata(token) { + raw(encode(this.sliceSerialize(token))) + } + + function onexitlineending(token) { + if (getData('slurpAllLineEndings')) { + return + } + + if (getData('slurpOneLineEnding')) { + setData('slurpOneLineEnding') + return + } + + if (getData('inCodeText')) { + raw(' ') + return + } + + raw(encode(this.sliceSerialize(token))) + } + + function onexitcodeflowvalue(token) { + raw(encode(this.sliceSerialize(token))) + setData('flowCodeSeenData', true) + } + + function onexithardbreak() { + tag('<br />') + } + + function onenterhtmlflow() { + lineEndingIfNeeded() + onenterhtml() + } + + function onexithtml() { + setData('ignoreEncode') + } + + function onenterhtml() { + if (settings.allowDangerousHtml) { + setData('ignoreEncode', true) + } + } + + function onenteremphasis() { + tag('<em>') + } + + function onenterstrong() { + tag('<strong>') + } + + function onentercodetext() { + setData('inCodeText', true) + tag('<code>') + } + + function onexitcodetext() { + setData('inCodeText') + tag('</code>') + } + + function onexitemphasis() { + tag('</em>') + } + + function onexitstrong() { + tag('</strong>') + } + + function onexitthematicbreak() { + lineEndingIfNeeded() + tag('<hr />') + } + + function onexitcharacterreferencemarker(token) { + setData('characterReferenceType', token.type) + } + + function onexitcharacterreferencevalue(token) { + var value = this.sliceSerialize(token) + value = getData('characterReferenceType') + ? safeFromInt( + value, + getData('characterReferenceType') === + 'characterReferenceMarkerNumeric' + ? 10 + : 16 + ) + : decodeEntity__default['default'](value) + raw(encode(value)) + setData('characterReferenceType') + } + + function onexitautolinkprotocol(token) { + var uri = this.sliceSerialize(token) + tag('<a href="' + url(uri, protocolHref) + '">') + raw(encode(uri)) + tag('</a>') + } + + function onexitautolinkemail(token) { + var uri = this.sliceSerialize(token) + tag('<a href="' + url('mailto:' + uri, protocolHref) + '">') + raw(encode(uri)) + tag('</a>') + } +} + +module.exports = compileHtml diff --git a/node_modules/micromark/dist/constant/assign.js b/node_modules/micromark/dist/constant/assign.js new file mode 100644 index 00000000..b6ae48a0 --- /dev/null +++ b/node_modules/micromark/dist/constant/assign.js @@ -0,0 +1,5 @@ +'use strict' + +var assign = Object.assign + +module.exports = assign diff --git a/node_modules/micromark/dist/constant/constants.d.ts b/node_modules/micromark/dist/constant/constants.d.ts new file mode 100644 index 00000000..846689a9 --- /dev/null +++ b/node_modules/micromark/dist/constant/constants.d.ts @@ -0,0 +1,23 @@ +// This module is generated by `script/`. + +export type Constant = + | 1 + | 2 + | 6 + | 63 + | 32 + | 'CDATA[' + | 7 + | 31 + | 3 + | 'flow' + | 'content' + | 'string' + | 'text' + | 4 + | 5 + | 8 + | 999 + | 10 + | 16 + | 10000 diff --git a/node_modules/micromark/dist/constant/constants.js b/node_modules/micromark/dist/constant/constants.js new file mode 100644 index 00000000..88772494 --- /dev/null +++ b/node_modules/micromark/dist/constant/constants.js @@ -0,0 +1,71 @@ +'use strict' + +// This module is compiled away! +// +// Parsing markdown comes with a couple of constants, such as minimum or maximum +// sizes of certain sequences. 
+// Additionally, there are a couple symbols used inside micromark. +// These are all defined here, but compiled away by scripts. +var constants = { + attentionSideBefore: 1, + // Symbol to mark an attention sequence as before content: `*a` + attentionSideAfter: 2, + // Symbol to mark an attention sequence as after content: `a*` + atxHeadingOpeningFenceSizeMax: 6, + // 6 number signs is fine, 7 isn’t. + autolinkDomainSizeMax: 63, + // 63 characters is fine, 64 is too many. + autolinkSchemeSizeMax: 32, + // 32 characters is fine, 33 is too many. + cdataOpeningString: 'CDATA[', + // And preceded by `<![`. + characterGroupWhitespace: 1, + // Symbol used to indicate a character is whitespace + characterGroupPunctuation: 2, + // Symbol used to indicate a character is whitespace + characterReferenceDecimalSizeMax: 7, + // `&#9999999;`. + characterReferenceHexadecimalSizeMax: 6, + // `&#xff9999;`. + characterReferenceNamedSizeMax: 31, + // `&CounterClockwiseContourIntegral;`. + codeFencedSequenceSizeMin: 3, + // At least 3 ticks or tildes are needed. + contentTypeFlow: 'flow', + contentTypeContent: 'content', + contentTypeString: 'string', + contentTypeText: 'text', + hardBreakPrefixSizeMin: 2, + // At least 2 trailing spaces are needed. + htmlRaw: 1, + // Symbol for `<script>` + htmlComment: 2, + // Symbol for `<!---->` + htmlInstruction: 3, + // Symbol for `<?php?>` + htmlDeclaration: 4, + // Symbol for `<!doctype>` + htmlCdata: 5, + // Symbol for `<![CDATA[]]>` + htmlBasic: 6, + // Symbol for `<div` + htmlComplete: 7, + // Symbol for `<x>` + htmlRawSizeMax: 8, + // Length of `textarea`. + linkResourceDestinationBalanceMax: 3, + // See: <https://spec.commonmark.org/0.29/#link-destination> + linkReferenceSizeMax: 999, + // See: <https://spec.commonmark.org/0.29/#link-label> + listItemValueSizeMax: 10, + // See: <https://spec.commonmark.org/0.29/#ordered-list-marker> + numericBaseDecimal: 10, + numericBaseHexadecimal: 0x10, + tabSize: 4, + // Tabs have a hard-coded size of 4, per CommonMark. + thematicBreakMarkerCountMin: 3, + // At least 3 asterisks, dashes, or underscores are needed. + v8MaxSafeChunkSize: 10000 // V8 (and potentially others) have problems injecting giant arrays into other arrays, hence we operate in chunks. +} + +module.exports = constants diff --git a/node_modules/micromark/dist/constant/from-char-code.js b/node_modules/micromark/dist/constant/from-char-code.js new file mode 100644 index 00000000..232eac74 --- /dev/null +++ b/node_modules/micromark/dist/constant/from-char-code.js @@ -0,0 +1,5 @@ +'use strict' + +var fromCharCode = String.fromCharCode + +module.exports = fromCharCode diff --git a/node_modules/micromark/dist/constant/has-own-property.js b/node_modules/micromark/dist/constant/has-own-property.js new file mode 100644 index 00000000..aa9197cd --- /dev/null +++ b/node_modules/micromark/dist/constant/has-own-property.js @@ -0,0 +1,5 @@ +'use strict' + +var own = {}.hasOwnProperty + +module.exports = own diff --git a/node_modules/micromark/dist/constant/html-block-names.js b/node_modules/micromark/dist/constant/html-block-names.js new file mode 100644 index 00000000..9b5ada73 --- /dev/null +++ b/node_modules/micromark/dist/constant/html-block-names.js @@ -0,0 +1,69 @@ +'use strict' + +// This module is copied from <https://spec.commonmark.org/0.29/#html-blocks>. 
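// These are the tag names that open a "basic" (kind 6) HTML flow block; such a
// block simply runs until the next blank line.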
+var basics = [ + 'address', + 'article', + 'aside', + 'base', + 'basefont', + 'blockquote', + 'body', + 'caption', + 'center', + 'col', + 'colgroup', + 'dd', + 'details', + 'dialog', + 'dir', + 'div', + 'dl', + 'dt', + 'fieldset', + 'figcaption', + 'figure', + 'footer', + 'form', + 'frame', + 'frameset', + 'h1', + 'h2', + 'h3', + 'h4', + 'h5', + 'h6', + 'head', + 'header', + 'hr', + 'html', + 'iframe', + 'legend', + 'li', + 'link', + 'main', + 'menu', + 'menuitem', + 'nav', + 'noframes', + 'ol', + 'optgroup', + 'option', + 'p', + 'param', + 'section', + 'source', + 'summary', + 'table', + 'tbody', + 'td', + 'tfoot', + 'th', + 'thead', + 'title', + 'tr', + 'track', + 'ul' +] + +module.exports = basics diff --git a/node_modules/micromark/dist/constant/html-raw-names.js b/node_modules/micromark/dist/constant/html-raw-names.js new file mode 100644 index 00000000..c22a3954 --- /dev/null +++ b/node_modules/micromark/dist/constant/html-raw-names.js @@ -0,0 +1,6 @@ +'use strict' + +// This module is copied from <https://spec.commonmark.org/0.29/#html-blocks>. +var raws = ['pre', 'script', 'style', 'textarea'] + +module.exports = raws diff --git a/node_modules/micromark/dist/constant/splice.js b/node_modules/micromark/dist/constant/splice.js new file mode 100644 index 00000000..8917210a --- /dev/null +++ b/node_modules/micromark/dist/constant/splice.js @@ -0,0 +1,5 @@ +'use strict' + +var splice = [].splice + +module.exports = splice diff --git a/node_modules/micromark/dist/constant/types.d.ts b/node_modules/micromark/dist/constant/types.d.ts new file mode 100644 index 00000000..fa45a590 --- /dev/null +++ b/node_modules/micromark/dist/constant/types.d.ts @@ -0,0 +1,3 @@ +// This module is generated by `script/`. + +export type Type = string diff --git a/node_modules/micromark/dist/constant/types.js b/node_modules/micromark/dist/constant/types.js new file mode 100644 index 00000000..b4e8787f --- /dev/null +++ b/node_modules/micromark/dist/constant/types.js @@ -0,0 +1,357 @@ +'use strict' + +// This module is compiled away! +// +// Here is the list of all types of tokens exposed by micromark, with a short +// explanation of what they include and where they are found. +// In picking names, generally, the rule is to be as explicit as possible +// instead of reusing names. +// For example, there is a `definitionDestination` and a `resourceDestination`, +// instead of one shared name. +var types = { + // Generic type for data, such as in a title, a destination, etc. + data: 'data', + // Generic type for syntactic whitespace (tabs, virtual spaces, spaces). + // Such as, between a fenced code fence and an info string. + whitespace: 'whitespace', + // Generic type for line endings (line feed, carriage return, carriage return + + // line feed). + lineEnding: 'lineEnding', + // A line ending, but ending a blank line. + lineEndingBlank: 'lineEndingBlank', + // Generic type for whitespace (tabs, virtual spaces, spaces) at the start of a + // line. + linePrefix: 'linePrefix', + // Generic type for whitespace (tabs, virtual spaces, spaces) at the end of a + // line. + lineSuffix: 'lineSuffix', + // Whole ATX heading: + // + // ```markdown + // # + // ## Alpha + // ### Bravo ### + // ``` + // + // Includes `atxHeadingSequence`, `whitespace`, `atxHeadingText`. + atxHeading: 'atxHeading', + // Sequence of number signs in an ATX heading (`###`). + atxHeadingSequence: 'atxHeadingSequence', + // Content in an ATX heading (`alpha`). + // Includes text. 
+ atxHeadingText: 'atxHeadingText', + // Whole autolink (`<https://example.com>` or `<admin@example.com>`) + // Includes `autolinkMarker` and `autolinkProtocol` or `autolinkEmail`. + autolink: 'autolink', + // Email autolink w/o markers (`admin@example.com`) + autolinkEmail: 'autolinkEmail', + // Marker around an `autolinkProtocol` or `autolinkEmail` (`<` or `>`). + autolinkMarker: 'autolinkMarker', + // Protocol autolink w/o markers (`https://example.com`) + autolinkProtocol: 'autolinkProtocol', + // A whole character escape (`\-`). + // Includes `escapeMarker` and `characterEscapeValue`. + characterEscape: 'characterEscape', + // The escaped character (`-`). + characterEscapeValue: 'characterEscapeValue', + // A whole character reference (`&amp;`, `&#8800;`, or `&#x1D306;`). + // Includes `characterReferenceMarker`, an optional + // `characterReferenceMarkerNumeric`, in which case an optional + // `characterReferenceMarkerHexadecimal`, and a `characterReferenceValue`. + characterReference: 'characterReference', + // The start or end marker (`&` or `;`). + characterReferenceMarker: 'characterReferenceMarker', + // Mark reference as numeric (`#`). + characterReferenceMarkerNumeric: 'characterReferenceMarkerNumeric', + // Mark reference as numeric (`x` or `X`). + characterReferenceMarkerHexadecimal: 'characterReferenceMarkerHexadecimal', + // Value of character reference w/o markers (`amp`, `8800`, or `1D306`). + characterReferenceValue: 'characterReferenceValue', + // Whole fenced code: + // + // ````markdown + // ```js + // alert(1) + // ``` + // ```` + codeFenced: 'codeFenced', + // A fenced code fence, including whitespace, sequence, info, and meta + // (` ```js `). + codeFencedFence: 'codeFencedFence', + // Sequence of grave accent or tilde characters (` ``` `) in a fence. + codeFencedFenceSequence: 'codeFencedFenceSequence', + // Info word (`js`) in a fence. + // Includes string. + codeFencedFenceInfo: 'codeFencedFenceInfo', + // Meta words (`highlight="1"`) in a fence. + // Includes string. + codeFencedFenceMeta: 'codeFencedFenceMeta', + // A line of code. + codeFlowValue: 'codeFlowValue', + // Whole indented code: + // + // ```markdown + // alert(1) + // ``` + // + // Includes `lineEnding`, `linePrefix`, and `codeFlowValue`. + codeIndented: 'codeIndented', + // A text code (``` `alpha` ```). + // Includes `codeTextSequence`, `codeTextData`, `lineEnding`, and can include + // `codeTextPadding`. + codeText: 'codeText', + codeTextData: 'codeTextData', + // A space or line ending right after or before a tick. + codeTextPadding: 'codeTextPadding', + // A text code fence (` `` `). + codeTextSequence: 'codeTextSequence', + // Whole content: + // + // ```markdown + // [a]: b + // c + // = + // d + // ``` + // + // Includes `paragraph` and `definition`. + content: 'content', + // Whole definition: + // + // ```markdown + // [micromark]: https://github.com/micromark/micromark + // ``` + // + // Includes `definitionLabel`, `definitionMarker`, `whitespace`, + // `definitionDestination`, and optionally `lineEnding` and `definitionTitle`. + definition: 'definition', + // Destination of a definition (`https://github.com/micromark/micromark` or + // `<https://github.com/micromark/micromark>`). + // Includes `definitionDestinationLiteral` or `definitionDestinationRaw`. + definitionDestination: 'definitionDestination', + // Enclosed destination of a definition + // (`<https://github.com/micromark/micromark>`). 
+ // Includes `definitionDestinationLiteralMarker` and optionally + // `definitionDestinationString`. + definitionDestinationLiteral: 'definitionDestinationLiteral', + // Markers of an enclosed definition destination (`<` or `>`). + definitionDestinationLiteralMarker: 'definitionDestinationLiteralMarker', + // Unenclosed destination of a definition + // (`https://github.com/micromark/micromark`). + // Includes `definitionDestinationString`. + definitionDestinationRaw: 'definitionDestinationRaw', + // Text in an destination (`https://github.com/micromark/micromark`). + // Includes string. + definitionDestinationString: 'definitionDestinationString', + // Label of a definition (`[micromark]`). + // Includes `definitionLabelMarker` and `definitionLabelString`. + definitionLabel: 'definitionLabel', + // Markers of a definition label (`[` or `]`). + definitionLabelMarker: 'definitionLabelMarker', + // Value of a definition label (`micromark`). + // Includes string. + definitionLabelString: 'definitionLabelString', + // Marker between a label and a destination (`:`). + definitionMarker: 'definitionMarker', + // Title of a definition (`"x"`, `'y'`, or `(z)`). + // Includes `definitionTitleMarker` and optionally `definitionTitleString`. + definitionTitle: 'definitionTitle', + // Marker around a title of a definition (`"`, `'`, `(`, or `)`). + definitionTitleMarker: 'definitionTitleMarker', + // Data without markers in a title (`z`). + // Includes string. + definitionTitleString: 'definitionTitleString', + // Emphasis (`*alpha*`). + // Includes `emphasisSequence` and `emphasisText`. + emphasis: 'emphasis', + // Sequence of emphasis markers (`*` or `_`). + emphasisSequence: 'emphasisSequence', + // Emphasis text (`alpha`). + // Includes text. + emphasisText: 'emphasisText', + // The character escape marker (`\`). + escapeMarker: 'escapeMarker', + // A hard break created with a backslash (`\\n`). + // Includes `escapeMarker` (does not include the line ending) + hardBreakEscape: 'hardBreakEscape', + // A hard break created with trailing spaces (` \n`). + // Does not include the line ending. + hardBreakTrailing: 'hardBreakTrailing', + // Flow HTML: + // + // ```markdown + // <div + // ``` + // + // Inlcudes `lineEnding`, `htmlFlowData`. + htmlFlow: 'htmlFlow', + htmlFlowData: 'htmlFlowData', + // HTML in text (the tag in `a <i> b`). + // Includes `lineEnding`, `htmlTextData`. + htmlText: 'htmlText', + htmlTextData: 'htmlTextData', + // Whole image (`![alpha](bravo)`, `![alpha][bravo]`, `![alpha][]`, or + // `![alpha]`). + // Includes `label` and an optional `resource` or `reference`. + image: 'image', + // Whole link label (`[*alpha*]`). + // Includes `labelLink` or `labelImage`, `labelText`, and `labelEnd`. + label: 'label', + // Text in an label (`*alpha*`). + // Includes text. + labelText: 'labelText', + // Start a link label (`[`). + // Includes a `labelMarker`. + labelLink: 'labelLink', + // Start an image label (`![`). + // Includes `labelImageMarker` and `labelMarker`. + labelImage: 'labelImage', + // Marker of a label (`[` or `]`). + labelMarker: 'labelMarker', + // Marker to start an image (`!`). + labelImageMarker: 'labelImageMarker', + // End a label (`]`). + // Includes `labelMarker`. + labelEnd: 'labelEnd', + // Whole link (`[alpha](bravo)`, `[alpha][bravo]`, `[alpha][]`, or `[alpha]`). + // Includes `label` and an optional `resource` or `reference`. + link: 'link', + // Whole paragraph: + // + // ```markdown + // alpha + // bravo. + // ``` + // + // Includes text. 
+ paragraph: 'paragraph', + // A reference (`[alpha]` or `[]`). + // Includes `referenceMarker` and an optional `referenceString`. + reference: 'reference', + // A reference marker (`[` or `]`). + referenceMarker: 'referenceMarker', + // Reference text (`alpha`). + // Includes string. + referenceString: 'referenceString', + // A resource (`(https://example.com "alpha")`). + // Includes `resourceMarker`, an optional `resourceDestination` with an optional + // `whitespace` and `resourceTitle`. + resource: 'resource', + // A resource destination (`https://example.com`). + // Includes `resourceDestinationLiteral` or `resourceDestinationRaw`. + resourceDestination: 'resourceDestination', + // A literal resource destination (`<https://example.com>`). + // Includes `resourceDestinationLiteralMarker` and optionally + // `resourceDestinationString`. + resourceDestinationLiteral: 'resourceDestinationLiteral', + // A resource destination marker (`<` or `>`). + resourceDestinationLiteralMarker: 'resourceDestinationLiteralMarker', + // A raw resource destination (`https://example.com`). + // Includes `resourceDestinationString`. + resourceDestinationRaw: 'resourceDestinationRaw', + // Resource destination text (`https://example.com`). + // Includes string. + resourceDestinationString: 'resourceDestinationString', + // A resource marker (`(` or `)`). + resourceMarker: 'resourceMarker', + // A resource title (`"alpha"`, `'alpha'`, or `(alpha)`). + // Includes `resourceTitleMarker` and optionally `resourceTitleString`. + resourceTitle: 'resourceTitle', + // A resource title marker (`"`, `'`, `(`, or `)`). + resourceTitleMarker: 'resourceTitleMarker', + // Resource destination title (`alpha`). + // Includes string. + resourceTitleString: 'resourceTitleString', + // Whole setext heading: + // + // ```markdown + // alpha + // bravo + // ===== + // ``` + // + // Includes `setextHeadingText`, `lineEnding`, `linePrefix`, and + // `setextHeadingLine`. + setextHeading: 'setextHeading', + // Content in a setext heading (`alpha\nbravo`). + // Includes text. + setextHeadingText: 'setextHeadingText', + // Underline in a setext heading, including whitespace suffix (`==`). + // Includes `setextHeadingLineSequence`. + setextHeadingLine: 'setextHeadingLine', + // Sequence of equals or dash characters in underline in a setext heading (`-`). + setextHeadingLineSequence: 'setextHeadingLineSequence', + // Strong (`**alpha**`). + // Includes `strongSequence` and `strongText`. + strong: 'strong', + // Sequence of strong markers (`**` or `__`). + strongSequence: 'strongSequence', + // Strong text (`alpha`). + // Includes text. + strongText: 'strongText', + // Whole thematic break: + // + // ```markdown + // * * * + // ``` + // + // Includes `thematicBreakSequence` and `whitespace`. + thematicBreak: 'thematicBreak', + // A sequence of one or more thematic break markers (`***`). + thematicBreakSequence: 'thematicBreakSequence', + // Whole block quote: + // + // ```markdown + // > a + // > + // > b + // ``` + // + // Includes `blockQuotePrefix` and flow. + blockQuote: 'blockQuote', + // The `>` or `> ` of a block quote. + blockQuotePrefix: 'blockQuotePrefix', + // The `>` of a block quote prefix. + blockQuoteMarker: 'blockQuoteMarker', + // The optional ` ` of a block quote prefix. + blockQuotePrefixWhitespace: 'blockQuotePrefixWhitespace', + // Whole unordered list: + // + // ```markdown + // - a + // b + // ``` + // + // Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further + // lines. 
+ listOrdered: 'listOrdered', + // Whole ordered list: + // + // ```markdown + // 1. a + // b + // ``` + // + // Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further + // lines. + listUnordered: 'listUnordered', + // The indent of further list item lines. + listItemIndent: 'listItemIndent', + // A marker, as in, `*`, `+`, `-`, `.`, or `)`. + listItemMarker: 'listItemMarker', + // The thing that starts a list item, such as `1. `. + // Includes `listItemValue` if ordered, `listItemMarker`, and + // `listItemPrefixWhitespace` (unless followed by a line ending). + listItemPrefix: 'listItemPrefix', + // The whitespace after a marker. + listItemPrefixWhitespace: 'listItemPrefixWhitespace', + // The numerical value of an ordered item. + listItemValue: 'listItemValue', + // Internal types used for subtokenizers, compiled away + chunkContent: 'chunkContent', + chunkFlow: 'chunkFlow', + chunkText: 'chunkText', + chunkString: 'chunkString' +} + +module.exports = types diff --git a/node_modules/micromark/dist/constant/unicode-punctuation-regex.js b/node_modules/micromark/dist/constant/unicode-punctuation-regex.js new file mode 100644 index 00000000..6d25ee4b --- /dev/null +++ b/node_modules/micromark/dist/constant/unicode-punctuation-regex.js @@ -0,0 +1,11 @@ +'use strict' + +// This module is generated by `script/`. +// +// CommonMark handles attention (emphasis, strong) markers based on what comes +// before or after them. +// One such difference is if those characters are Unicode punctuation. +// This script is generated from the Unicode data. +var unicodePunctuation = /[!-\/:-@\[-`\{-~\xA1\xA7\xAB\xB6\xB7\xBB\xBF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u2E52\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]/ + +module.exports = unicodePunctuation diff --git a/node_modules/micromark/dist/constructs.js b/node_modules/micromark/dist/constructs.js new file mode 100644 index 00000000..adcc84a4 --- /dev/null +++ b/node_modules/micromark/dist/constructs.js @@ -0,0 +1,127 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var text$1 = require('./initialize/text.js') +var attention = require('./tokenize/attention.js') +var autolink = require('./tokenize/autolink.js') +var blockQuote = require('./tokenize/block-quote.js') +var characterEscape = require('./tokenize/character-escape.js') +var characterReference = 
require('./tokenize/character-reference.js') +var codeFenced = require('./tokenize/code-fenced.js') +var codeIndented = require('./tokenize/code-indented.js') +var codeText = require('./tokenize/code-text.js') +var definition = require('./tokenize/definition.js') +var hardBreakEscape = require('./tokenize/hard-break-escape.js') +var headingAtx = require('./tokenize/heading-atx.js') +var htmlFlow = require('./tokenize/html-flow.js') +var htmlText = require('./tokenize/html-text.js') +var labelEnd = require('./tokenize/label-end.js') +var labelStartImage = require('./tokenize/label-start-image.js') +var labelStartLink = require('./tokenize/label-start-link.js') +var lineEnding = require('./tokenize/line-ending.js') +var list = require('./tokenize/list.js') +var setextUnderline = require('./tokenize/setext-underline.js') +var thematicBreak = require('./tokenize/thematic-break.js') + +var document = { + 42: list, + // Asterisk + 43: list, + // Plus sign + 45: list, + // Dash + 48: list, + // 0 + 49: list, + // 1 + 50: list, + // 2 + 51: list, + // 3 + 52: list, + // 4 + 53: list, + // 5 + 54: list, + // 6 + 55: list, + // 7 + 56: list, + // 8 + 57: list, + // 9 + 62: blockQuote // Greater than +} +var contentInitial = { + 91: definition // Left square bracket +} +var flowInitial = { + '-2': codeIndented, + // Horizontal tab + '-1': codeIndented, + // Virtual space + 32: codeIndented // Space +} +var flow = { + 35: headingAtx, + // Number sign + 42: thematicBreak, + // Asterisk + 45: [setextUnderline, thematicBreak], + // Dash + 60: htmlFlow, + // Less than + 61: setextUnderline, + // Equals to + 95: thematicBreak, + // Underscore + 96: codeFenced, + // Grave accent + 126: codeFenced // Tilde +} +var string = { + 38: characterReference, + // Ampersand + 92: characterEscape // Backslash +} +var text = { + '-5': lineEnding, + // Carriage return + '-4': lineEnding, + // Line feed + '-3': lineEnding, + // Carriage return + line feed + 33: labelStartImage, + // Exclamation mark + 38: characterReference, + // Ampersand + 42: attention, + // Asterisk + 60: [autolink, htmlText], + // Less than + 91: labelStartLink, + // Left square bracket + 92: [hardBreakEscape, characterEscape], + // Backslash + 93: labelEnd, + // Right square bracket + 95: attention, + // Underscore + 96: codeText // Grave accent +} +var insideSpan = { + null: [attention, text$1.resolver] +} +var disable = { + null: [] +} + +exports.contentInitial = contentInitial +exports.disable = disable +exports.document = document +exports.flow = flow +exports.flowInitial = flowInitial +exports.insideSpan = insideSpan +exports.string = string +exports.text = text diff --git a/node_modules/micromark/dist/index.d.ts b/node_modules/micromark/dist/index.d.ts new file mode 100644 index 00000000..aa009154 --- /dev/null +++ b/node_modules/micromark/dist/index.d.ts @@ -0,0 +1,11 @@ +import {Buffer, BufferEncoding, Options} from './shared-types' + +declare function buffer(value: string | Buffer, options?: Options): string + +declare function buffer( + value: string | Buffer, + encoding?: BufferEncoding, + options?: Options +): string + +export default buffer diff --git a/node_modules/micromark/dist/index.js b/node_modules/micromark/dist/index.js new file mode 100644 index 00000000..8b289a29 --- /dev/null +++ b/node_modules/micromark/dist/index.js @@ -0,0 +1,21 @@ +'use strict' + +var html = require('./compile/html.js') +var parse = require('./parse.js') +var postprocess = require('./postprocess.js') +var preprocess = require('./preprocess.js') + 
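// The exported `buffer` function below strings the whole pipeline together:
// `preprocess()` chunks the input into character codes, `parse()` turns the
// chunks into events, `postprocess()` runs subtokenization until every nested
// chunk is expanded, and the `html()` compiler serializes the events.
//
// A minimal usage sketch (assuming this package is required as `micromark`):
//
//   var micromark = require('micromark')
//   micromark('## Hello, *world*!')
//   // => '<h2>Hello, <em>world</em>!</h2>'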
+function buffer(value, encoding, options) { + if (typeof encoding !== 'string') { + options = encoding + encoding = undefined + } + + return html(options)( + postprocess( + parse(options).document().write(preprocess()(value, encoding, true)) + ) + ) +} + +module.exports = buffer diff --git a/node_modules/micromark/dist/initialize/content.js b/node_modules/micromark/dist/initialize/content.js new file mode 100644 index 00000000..546aafec --- /dev/null +++ b/node_modules/micromark/dist/initialize/content.js @@ -0,0 +1,69 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var factorySpace = require('../tokenize/factory-space.js') + +var tokenize = initializeContent + +function initializeContent(effects) { + var contentStart = effects.attempt( + this.parser.constructs.contentInitial, + afterContentStartConstruct, + paragraphInitial + ) + var previous + return contentStart + + function afterContentStartConstruct(code) { + if (code === null) { + effects.consume(code) + return + } + + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return factorySpace(effects, contentStart, 'linePrefix') + } + + function paragraphInitial(code) { + effects.enter('paragraph') + return lineStart(code) + } + + function lineStart(code) { + var token = effects.enter('chunkText', { + contentType: 'text', + previous: previous + }) + + if (previous) { + previous.next = token + } + + previous = token + return data(code) + } + + function data(code) { + if (code === null) { + effects.exit('chunkText') + effects.exit('paragraph') + effects.consume(code) + return + } + + if (markdownLineEnding(code)) { + effects.consume(code) + effects.exit('chunkText') + return lineStart + } // Data. + + effects.consume(code) + return data + } +} + +exports.tokenize = tokenize diff --git a/node_modules/micromark/dist/initialize/document.js b/node_modules/micromark/dist/initialize/document.js new file mode 100644 index 00000000..fa357fc3 --- /dev/null +++ b/node_modules/micromark/dist/initialize/document.js @@ -0,0 +1,237 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var factorySpace = require('../tokenize/factory-space.js') +var partialBlankLine = require('../tokenize/partial-blank-line.js') + +var tokenize = initializeDocument +var containerConstruct = { + tokenize: tokenizeContainer +} +var lazyFlowConstruct = { + tokenize: tokenizeLazyFlow +} + +function initializeDocument(effects) { + var self = this + var stack = [] + var continued = 0 + var inspectConstruct = { + tokenize: tokenizeInspect, + partial: true + } + var inspectResult + var childFlow + var childToken + return start + + function start(code) { + if (continued < stack.length) { + self.containerState = stack[continued][1] + return effects.attempt( + stack[continued][0].continuation, + documentContinue, + documentContinued + )(code) + } + + return documentContinued(code) + } + + function documentContinue(code) { + continued++ + return start(code) + } + + function documentContinued(code) { + // If we’re in a concrete construct (such as when expecting another line of + // HTML, or we resulted in lazy content), we can immediately start flow. 
+ if (inspectResult && inspectResult.flowContinue) { + return flowStart(code) + } + + self.interrupt = + childFlow && + childFlow.currentConstruct && + childFlow.currentConstruct.interruptible + self.containerState = {} + return effects.attempt( + containerConstruct, + containerContinue, + flowStart + )(code) + } + + function containerContinue(code) { + stack.push([self.currentConstruct, self.containerState]) + self.containerState = undefined + return documentContinued(code) + } + + function flowStart(code) { + if (code === null) { + exitContainers(0, true) + effects.consume(code) + return + } + + childFlow = childFlow || self.parser.flow(self.now()) + effects.enter('chunkFlow', { + contentType: 'flow', + previous: childToken, + _tokenizer: childFlow + }) + return flowContinue(code) + } + + function flowContinue(code) { + if (code === null) { + continueFlow(effects.exit('chunkFlow')) + return flowStart(code) + } + + if (markdownLineEnding(code)) { + effects.consume(code) + continueFlow(effects.exit('chunkFlow')) + return effects.check(inspectConstruct, documentAfterPeek) + } + + effects.consume(code) + return flowContinue + } + + function documentAfterPeek(code) { + exitContainers( + inspectResult.continued, + inspectResult && inspectResult.flowEnd + ) + continued = 0 + return start(code) + } + + function continueFlow(token) { + if (childToken) childToken.next = token + childToken = token + childFlow.lazy = inspectResult && inspectResult.lazy + childFlow.defineSkip(token.start) + childFlow.write(self.sliceStream(token)) + } + + function exitContainers(size, end) { + var index = stack.length // Close the flow. + + if (childFlow && end) { + childFlow.write([null]) + childToken = childFlow = undefined + } // Exit open containers. + + while (index-- > size) { + self.containerState = stack[index][1] + stack[index][0].exit.call(self, effects) + } + + stack.length = size + } + + function tokenizeInspect(effects, ok) { + var subcontinued = 0 + inspectResult = {} + return inspectStart + + function inspectStart(code) { + if (subcontinued < stack.length) { + self.containerState = stack[subcontinued][1] + return effects.attempt( + stack[subcontinued][0].continuation, + inspectContinue, + inspectLess + )(code) + } // If we’re continued but in a concrete flow, we can’t have more + // containers. + + if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) { + inspectResult.flowContinue = true + return inspectDone(code) + } + + self.interrupt = + childFlow.currentConstruct && childFlow.currentConstruct.interruptible + self.containerState = {} + return effects.attempt( + containerConstruct, + inspectFlowEnd, + inspectDone + )(code) + } + + function inspectContinue(code) { + subcontinued++ + return self.containerState._closeFlow + ? inspectFlowEnd(code) + : inspectStart(code) + } + + function inspectLess(code) { + if (childFlow.currentConstruct && childFlow.currentConstruct.lazy) { + // Maybe another container? + self.containerState = {} + return effects.attempt( + containerConstruct, + inspectFlowEnd, // Maybe flow, or a blank line? + effects.attempt( + lazyFlowConstruct, + inspectFlowEnd, + effects.check(partialBlankLine, inspectFlowEnd, inspectLazy) + ) + )(code) + } // Otherwise we’re interrupting. + + return inspectFlowEnd(code) + } + + function inspectLazy(code) { + // Act as if all containers are continued. 
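      // (Lazy continuation: a line such as paragraph text under a block quote
      // without a `>` prefix keeps the current flow open, so every container
      // counts as matched and no containers are closed.)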
+ subcontinued = stack.length + inspectResult.lazy = true + inspectResult.flowContinue = true + return inspectDone(code) + } // We’re done with flow if we have more containers, or an interruption. + + function inspectFlowEnd(code) { + inspectResult.flowEnd = true + return inspectDone(code) + } + + function inspectDone(code) { + inspectResult.continued = subcontinued + self.interrupt = self.containerState = undefined + return ok(code) + } + } +} + +function tokenizeContainer(effects, ok, nok) { + return factorySpace( + effects, + effects.attempt(this.parser.constructs.document, ok, nok), + 'linePrefix', + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : 4 + ) +} + +function tokenizeLazyFlow(effects, ok, nok) { + return factorySpace( + effects, + effects.lazy(this.parser.constructs.flow, ok, nok), + 'linePrefix', + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : 4 + ) +} + +exports.tokenize = tokenize diff --git a/node_modules/micromark/dist/initialize/flow.js b/node_modules/micromark/dist/initialize/flow.js new file mode 100644 index 00000000..0b7813c8 --- /dev/null +++ b/node_modules/micromark/dist/initialize/flow.js @@ -0,0 +1,60 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var content = require('../tokenize/content.js') +var factorySpace = require('../tokenize/factory-space.js') +var partialBlankLine = require('../tokenize/partial-blank-line.js') + +var tokenize = initializeFlow + +function initializeFlow(effects) { + var self = this + var initial = effects.attempt( + // Try to parse a blank line. + partialBlankLine, + atBlankEnding, // Try to parse initial flow (essentially, only code). + effects.attempt( + this.parser.constructs.flowInitial, + afterConstruct, + factorySpace( + effects, + effects.attempt( + this.parser.constructs.flow, + afterConstruct, + effects.attempt(content, afterConstruct) + ), + 'linePrefix' + ) + ) + ) + return initial + + function atBlankEnding(code) { + if (code === null) { + effects.consume(code) + return + } + + effects.enter('lineEndingBlank') + effects.consume(code) + effects.exit('lineEndingBlank') + self.currentConstruct = undefined + return initial + } + + function afterConstruct(code) { + if (code === null) { + effects.consume(code) + return + } + + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + self.currentConstruct = undefined + return initial + } +} + +exports.tokenize = tokenize diff --git a/node_modules/micromark/dist/initialize/text.js b/node_modules/micromark/dist/initialize/text.js new file mode 100644 index 00000000..d0d460f4 --- /dev/null +++ b/node_modules/micromark/dist/initialize/text.js @@ -0,0 +1,201 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var assign = require('../constant/assign.js') +var shallow = require('../util/shallow.js') + +var text = initializeFactory('text') +var string = initializeFactory('string') +var resolver = { + resolveAll: createResolver() +} + +function initializeFactory(field) { + return { + tokenize: initializeText, + resolveAll: createResolver( + field === 'text' ? resolveAllLineSuffixes : undefined + ) + } + + function initializeText(effects) { + var self = this + var constructs = this.parser.constructs[field] + var text = effects.attempt(constructs, start, notText) + return start + + function start(code) { + return atBreak(code) ? 
text(code) : notText(code) + } + + function notText(code) { + if (code === null) { + effects.consume(code) + return + } + + effects.enter('data') + effects.consume(code) + return data + } + + function data(code) { + if (atBreak(code)) { + effects.exit('data') + return text(code) + } // Data. + + effects.consume(code) + return data + } + + function atBreak(code) { + var list = constructs[code] + var index = -1 + + if (code === null) { + return true + } + + if (list) { + while (++index < list.length) { + if ( + !list[index].previous || + list[index].previous.call(self, self.previous) + ) { + return true + } + } + } + } + } +} + +function createResolver(extraResolver) { + return resolveAllText + + function resolveAllText(events, context) { + var index = -1 + var enter // A rather boring computation (to merge adjacent `data` events) which + // improves mm performance by 29%. + + while (++index <= events.length) { + if (enter === undefined) { + if (events[index] && events[index][1].type === 'data') { + enter = index + index++ + } + } else if (!events[index] || events[index][1].type !== 'data') { + // Don’t do anything if there is one data token. + if (index !== enter + 2) { + events[enter][1].end = events[index - 1][1].end + events.splice(enter + 2, index - enter - 2) + index = enter + 2 + } + + enter = undefined + } + } + + return extraResolver ? extraResolver(events, context) : events + } +} // A rather ugly set of instructions which again looks at chunks in the input +// stream. +// The reason to do this here is that it is *much* faster to parse in reverse. +// And that we can’t hook into `null` to split the line suffix before an EOF. +// To do: figure out if we can make this into a clean utility, or even in core. +// As it will be useful for GFMs literal autolink extension (and maybe even +// tables?) + +function resolveAllLineSuffixes(events, context) { + var eventIndex = -1 + var chunks + var data + var chunk + var index + var bufferIndex + var size + var tabs + var token + + while (++eventIndex <= events.length) { + if ( + (eventIndex === events.length || + events[eventIndex][1].type === 'lineEnding') && + events[eventIndex - 1][1].type === 'data' + ) { + data = events[eventIndex - 1][1] + chunks = context.sliceStream(data) + index = chunks.length + bufferIndex = -1 + size = 0 + tabs = undefined + + while (index--) { + chunk = chunks[index] + + if (typeof chunk === 'string') { + bufferIndex = chunk.length + + while (chunk.charCodeAt(bufferIndex - 1) === 32) { + size++ + bufferIndex-- + } + + if (bufferIndex) break + bufferIndex = -1 + } // Number + else if (chunk === -2) { + tabs = true + size++ + } else if (chunk === -1); + else { + // Replacement character, exit. + index++ + break + } + } + + if (size) { + token = { + type: + eventIndex === events.length || tabs || size < 2 + ? 'lineSuffix' + : 'hardBreakTrailing', + start: { + line: data.end.line, + column: data.end.column - size, + offset: data.end.offset - size, + _index: data.start._index + index, + _bufferIndex: index + ? 
bufferIndex + : data.start._bufferIndex + bufferIndex + }, + end: shallow(data.end) + } + data.end = shallow(token.start) + + if (data.start.offset === data.end.offset) { + assign(data, token) + } else { + events.splice( + eventIndex, + 0, + ['enter', token, context], + ['exit', token, context] + ) + eventIndex += 2 + } + } + + eventIndex++ + } + } + + return events +} + +exports.resolver = resolver +exports.string = string +exports.text = text diff --git a/node_modules/micromark/dist/parse.d.ts b/node_modules/micromark/dist/parse.d.ts new file mode 100644 index 00000000..747750c5 --- /dev/null +++ b/node_modules/micromark/dist/parse.d.ts @@ -0,0 +1,5 @@ +import {ParseOptions, Parser} from './shared-types' + +declare function createParser(options?: ParseOptions): Parser + +export default createParser diff --git a/node_modules/micromark/dist/parse.js b/node_modules/micromark/dist/parse.js new file mode 100644 index 00000000..9482300a --- /dev/null +++ b/node_modules/micromark/dist/parse.js @@ -0,0 +1,36 @@ +'use strict' + +var content = require('./initialize/content.js') +var document = require('./initialize/document.js') +var flow = require('./initialize/flow.js') +var text = require('./initialize/text.js') +var combineExtensions = require('./util/combine-extensions.js') +var createTokenizer = require('./util/create-tokenizer.js') +var miniflat = require('./util/miniflat.js') +var constructs = require('./constructs.js') + +function parse(options) { + var settings = options || {} + var parser = { + defined: [], + constructs: combineExtensions( + [constructs].concat(miniflat(settings.extensions)) + ), + content: create(content), + document: create(document), + flow: create(flow), + string: create(text.string), + text: create(text.text) + } + return parser + + function create(initializer) { + return creator + + function creator(from) { + return createTokenizer(parser, initializer, from) + } + } +} + +module.exports = parse diff --git a/node_modules/micromark/dist/postprocess.d.ts b/node_modules/micromark/dist/postprocess.d.ts new file mode 100644 index 00000000..b2af6ac1 --- /dev/null +++ b/node_modules/micromark/dist/postprocess.d.ts @@ -0,0 +1,5 @@ +import {Event} from './shared-types' + +declare function postprocess(events: Event[]): Event[] + +export default postprocess diff --git a/node_modules/micromark/dist/postprocess.js b/node_modules/micromark/dist/postprocess.js new file mode 100644 index 00000000..842f8ce8 --- /dev/null +++ b/node_modules/micromark/dist/postprocess.js @@ -0,0 +1,13 @@ +'use strict' + +var subtokenize = require('./util/subtokenize.js') + +function postprocess(events) { + while (!subtokenize(events)) { + // Empty + } + + return events +} + +module.exports = postprocess diff --git a/node_modules/micromark/dist/preprocess.d.ts b/node_modules/micromark/dist/preprocess.d.ts new file mode 100644 index 00000000..95692ae5 --- /dev/null +++ b/node_modules/micromark/dist/preprocess.d.ts @@ -0,0 +1,11 @@ +import {BufferEncoding} from './shared-types' + +type PreprocessReturn = ( + value: string, + encoding: BufferEncoding, + end?: boolean +) => string[] + +declare function preprocess(): PreprocessReturn + +export default preprocess diff --git a/node_modules/micromark/dist/preprocess.js b/node_modules/micromark/dist/preprocess.js new file mode 100644 index 00000000..b7186454 --- /dev/null +++ b/node_modules/micromark/dist/preprocess.js @@ -0,0 +1,87 @@ +'use strict' + +var search = /[\0\t\n\r]/g + +function preprocess() { + var start = true + var column = 1 + var buffer = '' + 
var atCarriageReturn + return preprocessor + + function preprocessor(value, encoding, end) { + var chunks = [] + var match + var next + var startPosition + var endPosition + var code + value = buffer + value.toString(encoding) + startPosition = 0 + buffer = '' + + if (start) { + if (value.charCodeAt(0) === 65279) { + startPosition++ + } + + start = undefined + } + + while (startPosition < value.length) { + search.lastIndex = startPosition + match = search.exec(value) + endPosition = match ? match.index : value.length + code = value.charCodeAt(endPosition) + + if (!match) { + buffer = value.slice(startPosition) + break + } + + if (code === 10 && startPosition === endPosition && atCarriageReturn) { + chunks.push(-3) + atCarriageReturn = undefined + } else { + if (atCarriageReturn) { + chunks.push(-5) + atCarriageReturn = undefined + } + + if (startPosition < endPosition) { + chunks.push(value.slice(startPosition, endPosition)) + column += endPosition - startPosition + } + + if (code === 0) { + chunks.push(65533) + column++ + } else if (code === 9) { + next = Math.ceil(column / 4) * 4 + chunks.push(-2) + + while (column++ < next) chunks.push(-1) + } else if (code === 10) { + chunks.push(-4) + column = 1 + } // Must be carriage return. + else { + atCarriageReturn = true + column = 1 + } + } + + startPosition = endPosition + 1 + } + + if (end) { + if (atCarriageReturn) chunks.push(-5) + if (buffer) chunks.push(buffer) + chunks.push(null) + } + + return chunks + } +} + +module.exports = preprocess diff --git a/node_modules/micromark/dist/shared-types.d.ts b/node_modules/micromark/dist/shared-types.d.ts new file mode 100644 index 00000000..81595fd2 --- /dev/null +++ b/node_modules/micromark/dist/shared-types.d.ts @@ -0,0 +1,291 @@ +// Minimum TypeScript Version: 3.0 + +import {Code} from './character/codes' +import {Type} from './constant/types' + +/** + * A location in a string or buffer + */ +export interface Point { + line: number + column: number + offset: number + _index?: number + _bufferIndex?: number +} + +/** + * + */ +export interface Token { + type: Type + start: Point + end: Point + + previous?: Token + next?: Token + + /** + * Declares a token as having content of a certain type. + * Because markdown requires to first parse containers, flow, content completely, + * and then later go on to phrasing and such, it needs to be declared somewhere on the tokens. + */ + contentType?: 'flow' | 'content' | 'string' | 'text' + + /** + * Used when dealing with linked tokens. 
A child tokenizer is needed to tokenize them, which is stored on those tokens
+   */
+  _tokenizer?: Tokenizer
+
+  /**
+   * Close and open are also used in attention:
+   * depending on the characters before and after sequences (**),
+   * the sequence can open, close, both, or none
+   */
+  _open?: boolean
+
+  /**
+   * Close and open are also used in attention:
+   * depending on the characters before and after sequences (**),
+   * the sequence can open, close, both, or none
+   */
+  _close?: boolean
+}
+
+/**
+ *
+ */
+export type Event = [string, Token, Tokenizer]
+
+/**
+ * These are transitions to update the CommonMark State Machine (CMSM)
+ */
+export interface Effects {
+  /**
+   * Enter and exit define where tokens start and end
+   */
+  enter: (type: Type) => Token
+
+  /**
+   * Enter and exit define where tokens start and end
+   */
+  exit: (type: Type) => Token
+
+  /**
+   * Consume deals with a character, and moves to the next
+   */
+  consume: (code: number) => void
+
+  /**
+   * Attempt deals with several values, and tries to parse according to those values.
+   * If a value resulted in `ok`, it worked, the tokens that were made are used,
+   * and `returnState` is switched to.
+   * If the result is `nok`, the attempt failed,
+   * so we revert to the original state, and `bogusState` is used.
+   */
+  attempt: (
+    constructInfo:
+      | Construct
+      | Construct[]
+      | Record<CodeAsKey, Construct | Construct[]>,
+    returnState: State,
+    bogusState?: State
+  ) => (code: Code) => void
+
+  /**
+   * Interrupt is used for stuff right after a line of content.
+   */
+  interrupt: (
+    constructInfo:
+      | Construct
+      | Construct[]
+      | Record<CodeAsKey, Construct | Construct[]>,
+    ok: Okay,
+    nok?: NotOkay
+  ) => (code: Code) => void
+
+  check: (
+    constructInfo:
+      | Construct
+      | Construct[]
+      | Record<CodeAsKey, Construct | Construct[]>,
+    ok: Okay,
+    nok?: NotOkay
+  ) => (code: Code) => void
+
+  /**
+   * Lazy is used for lines that were not properly preceded by the container.
+   */
+  lazy: (
+    constructInfo:
+      | Construct
+      | Construct[]
+      | Record<CodeAsKey, Construct | Construct[]>,
+    ok: Okay,
+    nok?: NotOkay
+  ) => void
+}
+
+/**
+ * A state function should return another function: the next state-as-a-function to go to.
+ *
+ * But there is one case where they return void: for the eof character code (at the end of a value)
+ * The reason being: well, there isn’t any state that makes sense, so void works well. Practically
+ * that has also helped: if for some reason it was a mistake, then an exception is thrown because
+ * there is no next function, meaning it surfaces early.
+ */ +export type State = (code: number) => State | void + +/** + * + */ +export type Okay = State + +/** + * + */ +export type NotOkay = State + +/** + * + */ +export interface Tokenizer { + previous: Code + events: Event[] + parser: Parser + sliceStream: (token: Token) => Chunk[] + sliceSerialize: (token: Token) => string + now: () => Point + defineSkip: (value: Point) => void + write: (slice: Chunk[]) => Event[] +} + +export type Resolve = (events: Event[], context: Tokenizer) => Event[] + +export type Tokenize = (context: Tokenizer, effects: Effects) => State + +export interface Construct { + name?: string + tokenize: Tokenize + partial?: boolean + resolve?: Resolve + resolveTo?: Resolve + resolveAll?: Resolve + concrete?: boolean + interruptible?: boolean + lazy?: boolean +} + +/** + * + */ +export interface Parser { + constructs: Record<CodeAsKey, Construct | Construct[]> + content: (from: Point) => Tokenizer + document: (from: Point) => Tokenizer + flow: (from: Point) => Tokenizer + string: (from: Point) => Tokenizer + text: (from: Point) => Tokenizer + defined: string[] +} + +/** + * + */ +export interface TokenizerThis { + events: Event[] + interrupt?: boolean + lazy?: boolean + containerState?: Record<string, unknown> +} + +/** + * `Compile` is the return value of `lib/compile/html.js` + */ +export type Compile = (slice: Event[]) => string + +/** + * https://github.com/micromark/micromark#syntaxextension + */ +export interface SyntaxExtension { + document?: Record<CodeAsKey, Construct | Construct[]> + contentInitial?: Record<CodeAsKey, Construct | Construct[]> + flowInitial?: Record<CodeAsKey, Construct | Construct[]> + flow?: Record<CodeAsKey, Construct | Construct[]> + string?: Record<CodeAsKey, Construct | Construct[]> + text?: Record<CodeAsKey, Construct | Construct[]> +} + +/** + * https://github.com/micromark/micromark#htmlextension + */ +export type HtmlExtension = + | {enter: Record<Type, () => void>} + | {exit: Record<Type, () => void>} + +export type Options = ParseOptions & CompileOptions + +export interface ParseOptions { + // Array of syntax extensions + // + extensions?: SyntaxExtension[] +} + +export interface CompileOptions { + // Value to use for line endings not in `doc` (`string`, default: first line + // ending or `'\n'`). + // + // Generally, micromark copies line endings (`'\r'`, `'\n'`, `'\r\n'`) in the + // markdown document over to the compiled HTML. + // In some cases, such as `> a`, CommonMark requires that extra line endings are + // added: `<blockquote>\n<p>a</p>\n</blockquote>`. + // + defaultLineEnding?: '\r' | '\n' | '\r\n' + // Whether to allow embedded HTML (`boolean`, default: `false`). + // + allowDangerousHtml?: boolean + // Whether to allow potentially dangerous protocols in links and images (`boolean`, + // default: `false`). + // URLs relative to the current protocol are always allowed (such as, `image.jpg`). + // For links, the allowed protocols are `http`, `https`, `irc`, `ircs`, `mailto`, + // and `xmpp`. + // For images, the allowed protocols are `http` and `https`. + // + allowDangerousProtocol?: boolean + // Array of HTML extensions + // + htmlExtensions?: HtmlExtension[] +} + +export type Chunk = NonNullable<Code> | string + +// TypeScript will complain that `null` can't be the key of an object. So when a `Code` value is a key of an object, use CodeAsKey instead. 
+export type CodeAsKey = NonNullable<Code> | 'null' + +/** + * Encodings supported by the buffer class + * + * @remarks + * This is a copy of the typing from Node, copied to prevent Node globals from being needed. + * Copied from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/a2bc1d868d81733a8969236655fa600bd3651a7b/types/node/globals.d.ts#L174 + */ +export type BufferEncoding = + | 'ascii' + | 'utf8' + | 'utf-8' + | 'utf16le' + | 'ucs2' + | 'ucs-2' + | 'base64' + | 'latin1' + | 'binary' + | 'hex' + +/** + * This is an interface for Node's Buffer. + */ +export interface Buffer { + toString: (encoding?: BufferEncoding) => string +} + +export type CodeCheck = (code: Code) => boolean diff --git a/node_modules/micromark/dist/stream.d.ts b/node_modules/micromark/dist/stream.d.ts new file mode 100644 index 00000000..b814c57b --- /dev/null +++ b/node_modules/micromark/dist/stream.d.ts @@ -0,0 +1,6 @@ +import {EventEmitter} from 'events' +import {Options} from './shared-types' + +declare function stream(options?: Options): EventEmitter + +export default stream diff --git a/node_modules/micromark/dist/stream.js b/node_modules/micromark/dist/stream.js new file mode 100644 index 00000000..c26d4d3b --- /dev/null +++ b/node_modules/micromark/dist/stream.js @@ -0,0 +1,103 @@ +'use strict' + +var events = require('events') +var html = require('./compile/html.js') +var parse = require('./parse.js') +var postprocess = require('./postprocess.js') +var preprocess = require('./preprocess.js') + +function stream(options) { + var preprocess$1 = preprocess() + var tokenize = parse(options).document().write + var compile = html(options) + var emitter = new events.EventEmitter() + var ended + emitter.writable = emitter.readable = true + emitter.write = write + emitter.end = end + emitter.pipe = pipe + return emitter // Write a chunk into memory. + + function write(chunk, encoding, callback) { + if (typeof encoding === 'function') { + callback = encoding + encoding = undefined + } + + if (ended) { + throw new Error('Did not expect `write` after `end`') + } + + tokenize(preprocess$1(chunk || '', encoding)) + + if (callback) { + callback() + } // Signal succesful write. + + return true + } // End the writing. + // Passes all arguments to a final `write`. + + function end(chunk, encoding, callback) { + write(chunk, encoding, callback) + emitter.emit( + 'data', + compile(postprocess(tokenize(preprocess$1('', encoding, true)))) + ) + emitter.emit('end') + ended = true + return true + } // Pipe the processor into a writable stream. + // Basically `Stream#pipe`, but inlined and simplified to keep the bundled + // size down. + // See: <https://github.com/nodejs/node/blob/43a5170/lib/internal/streams/legacy.js#L13>. + + function pipe(dest, options) { + emitter.on('data', ondata) + emitter.on('error', onerror) + emitter.on('end', cleanup) + emitter.on('close', cleanup) // If the `end` option is not supplied, `dest.end()` will be called when the + // `end` or `close` events are received. + + if (!dest._isStdio && (!options || options.end !== false)) { + emitter.on('end', onend) + } + + dest.on('error', onerror) + dest.on('close', cleanup) + dest.emit('pipe', emitter) + return dest // End destination. + + function onend() { + if (dest.end) { + dest.end() + } + } // Handle data. + + function ondata(chunk) { + if (dest.writable) { + dest.write(chunk) + } + } // Clean listeners. 
+ + function cleanup() { + emitter.removeListener('data', ondata) + emitter.removeListener('end', onend) + emitter.removeListener('error', onerror) + emitter.removeListener('end', cleanup) + emitter.removeListener('close', cleanup) + dest.removeListener('error', onerror) + dest.removeListener('close', cleanup) + } // Close dangling pipes and handle unheard errors. + + function onerror(error) { + cleanup() + + if (!emitter.listenerCount('error')) { + throw error // Unhandled stream error in pipe. + } + } + } +} + +module.exports = stream diff --git a/node_modules/micromark/dist/tokenize/attention.js b/node_modules/micromark/dist/tokenize/attention.js new file mode 100644 index 00000000..b34be6f2 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/attention.js @@ -0,0 +1,186 @@ +'use strict' + +var chunkedPush = require('../util/chunked-push.js') +var chunkedSplice = require('../util/chunked-splice.js') +var classifyCharacter = require('../util/classify-character.js') +var movePoint = require('../util/move-point.js') +var resolveAll = require('../util/resolve-all.js') +var shallow = require('../util/shallow.js') + +var attention = { + name: 'attention', + tokenize: tokenizeAttention, + resolveAll: resolveAllAttention +} + +function resolveAllAttention(events, context) { + var index = -1 + var open + var group + var text + var openingSequence + var closingSequence + var use + var nextEvents + var offset // Walk through all events. + // + // Note: performance of this is fine on an mb of normal markdown, but it’s + // a bottleneck for malicious stuff. + + while (++index < events.length) { + // Find a token that can close. + if ( + events[index][0] === 'enter' && + events[index][1].type === 'attentionSequence' && + events[index][1]._close + ) { + open = index // Now walk back to find an opener. + + while (open--) { + // Find a token that can open the closer. + if ( + events[open][0] === 'exit' && + events[open][1].type === 'attentionSequence' && + events[open][1]._open && // If the markers are the same: + context.sliceSerialize(events[open][1]).charCodeAt(0) === + context.sliceSerialize(events[index][1]).charCodeAt(0) + ) { + // If the opening can close or the closing can open, + // and the close size *is not* a multiple of three, + // but the sum of the opening and closing size *is* multiple of three, + // then don’t match. + if ( + (events[open][1]._close || events[index][1]._open) && + (events[index][1].end.offset - events[index][1].start.offset) % 3 && + !( + (events[open][1].end.offset - + events[open][1].start.offset + + events[index][1].end.offset - + events[index][1].start.offset) % + 3 + ) + ) { + continue + } // Number of markers to use from the sequence. + + use = + events[open][1].end.offset - events[open][1].start.offset > 1 && + events[index][1].end.offset - events[index][1].start.offset > 1 + ? 2 + : 1 + openingSequence = { + type: use > 1 ? 'strongSequence' : 'emphasisSequence', + start: movePoint(shallow(events[open][1].end), -use), + end: shallow(events[open][1].end) + } + closingSequence = { + type: use > 1 ? 'strongSequence' : 'emphasisSequence', + start: shallow(events[index][1].start), + end: movePoint(shallow(events[index][1].start), use) + } + text = { + type: use > 1 ? 'strongText' : 'emphasisText', + start: shallow(events[open][1].end), + end: shallow(events[index][1].start) + } + group = { + type: use > 1 ? 
'strong' : 'emphasis', + start: shallow(openingSequence.start), + end: shallow(closingSequence.end) + } + events[open][1].end = shallow(openingSequence.start) + events[index][1].start = shallow(closingSequence.end) + nextEvents = [] // If there are more markers in the opening, add them before. + + if (events[open][1].end.offset - events[open][1].start.offset) { + nextEvents = chunkedPush(nextEvents, [ + ['enter', events[open][1], context], + ['exit', events[open][1], context] + ]) + } // Opening. + + nextEvents = chunkedPush(nextEvents, [ + ['enter', group, context], + ['enter', openingSequence, context], + ['exit', openingSequence, context], + ['enter', text, context] + ]) // Between. + + nextEvents = chunkedPush( + nextEvents, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(open + 1, index), + context + ) + ) // Closing. + + nextEvents = chunkedPush(nextEvents, [ + ['exit', text, context], + ['enter', closingSequence, context], + ['exit', closingSequence, context], + ['exit', group, context] + ]) // If there are more markers in the closing, add them after. + + if (events[index][1].end.offset - events[index][1].start.offset) { + offset = 2 + nextEvents = chunkedPush(nextEvents, [ + ['enter', events[index][1], context], + ['exit', events[index][1], context] + ]) + } else { + offset = 0 + } + + chunkedSplice(events, open - 1, index - open + 3, nextEvents) + index = open + nextEvents.length - offset - 2 + break + } + } + } + } // Remove remaining sequences. + + index = -1 + + while (++index < events.length) { + if (events[index][1].type === 'attentionSequence') { + events[index][1].type = 'data' + } + } + + return events +} + +function tokenizeAttention(effects, ok) { + var before = classifyCharacter(this.previous) + var marker + return start + + function start(code) { + effects.enter('attentionSequence') + marker = code + return sequence(code) + } + + function sequence(code) { + var token + var after + var open + var close + + if (code === marker) { + effects.consume(code) + return sequence + } + + token = effects.exit('attentionSequence') + after = classifyCharacter(code) + open = !after || (after === 2 && before) + close = !before || (before === 2 && after) + token._open = marker === 42 ? open : open && (before || !close) + token._close = marker === 42 ? close : close && (after || !open) + return ok(code) + } +} + +module.exports = attention diff --git a/node_modules/micromark/dist/tokenize/autolink.js b/node_modules/micromark/dist/tokenize/autolink.js new file mode 100644 index 00000000..d235d5f4 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/autolink.js @@ -0,0 +1,125 @@ +'use strict' + +var asciiAlpha = require('../character/ascii-alpha.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var asciiAtext = require('../character/ascii-atext.js') +var asciiControl = require('../character/ascii-control.js') + +var autolink = { + name: 'autolink', + tokenize: tokenizeAutolink +} + +function tokenizeAutolink(effects, ok, nok) { + var size = 1 + return start + + function start(code) { + effects.enter('autolink') + effects.enter('autolinkMarker') + effects.consume(code) + effects.exit('autolinkMarker') + effects.enter('autolinkProtocol') + return open + } + + function open(code) { + if (asciiAlpha(code)) { + effects.consume(code) + return schemeOrEmailAtext + } + + return asciiAtext(code) ? 
emailAtext(code) : nok(code) + } + + function schemeOrEmailAtext(code) { + return code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code) + ? schemeInsideOrEmailAtext(code) + : emailAtext(code) + } + + function schemeInsideOrEmailAtext(code) { + if (code === 58) { + effects.consume(code) + return urlInside + } + + if ( + (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) && + size++ < 32 + ) { + effects.consume(code) + return schemeInsideOrEmailAtext + } + + return emailAtext(code) + } + + function urlInside(code) { + if (code === 62) { + effects.exit('autolinkProtocol') + return end(code) + } + + if (code === 32 || code === 60 || asciiControl(code)) { + return nok(code) + } + + effects.consume(code) + return urlInside + } + + function emailAtext(code) { + if (code === 64) { + effects.consume(code) + size = 0 + return emailAtSignOrDot + } + + if (asciiAtext(code)) { + effects.consume(code) + return emailAtext + } + + return nok(code) + } + + function emailAtSignOrDot(code) { + return asciiAlphanumeric(code) ? emailLabel(code) : nok(code) + } + + function emailLabel(code) { + if (code === 46) { + effects.consume(code) + size = 0 + return emailAtSignOrDot + } + + if (code === 62) { + // Exit, then change the type. + effects.exit('autolinkProtocol').type = 'autolinkEmail' + return end(code) + } + + return emailValue(code) + } + + function emailValue(code) { + if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) { + effects.consume(code) + return code === 45 ? emailValue : emailLabel + } + + return nok(code) + } + + function end(code) { + effects.enter('autolinkMarker') + effects.consume(code) + effects.exit('autolinkMarker') + effects.exit('autolink') + return ok + } +} + +module.exports = autolink diff --git a/node_modules/micromark/dist/tokenize/block-quote.js b/node_modules/micromark/dist/tokenize/block-quote.js new file mode 100644 index 00000000..b3090ca2 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/block-quote.js @@ -0,0 +1,67 @@ +'use strict' + +var markdownSpace = require('../character/markdown-space.js') +var factorySpace = require('./factory-space.js') + +var blockQuote = { + name: 'blockQuote', + tokenize: tokenizeBlockQuoteStart, + continuation: { + tokenize: tokenizeBlockQuoteContinuation + }, + exit: exit +} + +function tokenizeBlockQuoteStart(effects, ok, nok) { + var self = this + return start + + function start(code) { + if (code === 62) { + if (!self.containerState.open) { + effects.enter('blockQuote', { + _container: true + }) + self.containerState.open = true + } + + effects.enter('blockQuotePrefix') + effects.enter('blockQuoteMarker') + effects.consume(code) + effects.exit('blockQuoteMarker') + return after + } + + return nok(code) + } + + function after(code) { + if (markdownSpace(code)) { + effects.enter('blockQuotePrefixWhitespace') + effects.consume(code) + effects.exit('blockQuotePrefixWhitespace') + effects.exit('blockQuotePrefix') + return ok + } + + effects.exit('blockQuotePrefix') + return ok(code) + } +} + +function tokenizeBlockQuoteContinuation(effects, ok, nok) { + return factorySpace( + effects, + effects.attempt(blockQuote, ok, nok), + 'linePrefix', + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? 
undefined + : 4 + ) +} + +function exit(effects) { + effects.exit('blockQuote') +} + +module.exports = blockQuote diff --git a/node_modules/micromark/dist/tokenize/character-escape.js b/node_modules/micromark/dist/tokenize/character-escape.js new file mode 100644 index 00000000..dcad7353 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/character-escape.js @@ -0,0 +1,34 @@ +'use strict' + +var asciiPunctuation = require('../character/ascii-punctuation.js') + +var characterEscape = { + name: 'characterEscape', + tokenize: tokenizeCharacterEscape +} + +function tokenizeCharacterEscape(effects, ok, nok) { + return start + + function start(code) { + effects.enter('characterEscape') + effects.enter('escapeMarker') + effects.consume(code) + effects.exit('escapeMarker') + return open + } + + function open(code) { + if (asciiPunctuation(code)) { + effects.enter('characterEscapeValue') + effects.consume(code) + effects.exit('characterEscapeValue') + effects.exit('characterEscape') + return ok + } + + return nok(code) + } +} + +module.exports = characterEscape diff --git a/node_modules/micromark/dist/tokenize/character-reference.js b/node_modules/micromark/dist/tokenize/character-reference.js new file mode 100644 index 00000000..101027db --- /dev/null +++ b/node_modules/micromark/dist/tokenize/character-reference.js @@ -0,0 +1,94 @@ +'use strict' + +var decodeEntity = require('parse-entities/decode-entity.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var asciiDigit = require('../character/ascii-digit.js') +var asciiHexDigit = require('../character/ascii-hex-digit.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var decodeEntity__default = /*#__PURE__*/ _interopDefaultLegacy(decodeEntity) + +var characterReference = { + name: 'characterReference', + tokenize: tokenizeCharacterReference +} + +function tokenizeCharacterReference(effects, ok, nok) { + var self = this + var size = 0 + var max + var test + return start + + function start(code) { + effects.enter('characterReference') + effects.enter('characterReferenceMarker') + effects.consume(code) + effects.exit('characterReferenceMarker') + return open + } + + function open(code) { + if (code === 35) { + effects.enter('characterReferenceMarkerNumeric') + effects.consume(code) + effects.exit('characterReferenceMarkerNumeric') + return numeric + } + + effects.enter('characterReferenceValue') + max = 31 + test = asciiAlphanumeric + return value(code) + } + + function numeric(code) { + if (code === 88 || code === 120) { + effects.enter('characterReferenceMarkerHexadecimal') + effects.consume(code) + effects.exit('characterReferenceMarkerHexadecimal') + effects.enter('characterReferenceValue') + max = 6 + test = asciiHexDigit + return value + } + + effects.enter('characterReferenceValue') + max = 7 + test = asciiDigit + return value(code) + } + + function value(code) { + var token + + if (code === 59 && size) { + token = effects.exit('characterReferenceValue') + + if ( + test === asciiAlphanumeric && + !decodeEntity__default['default'](self.sliceSerialize(token)) + ) { + return nok(code) + } + + effects.enter('characterReferenceMarker') + effects.consume(code) + effects.exit('characterReferenceMarker') + effects.exit('characterReference') + return ok + } + + if (test(code) && size++ < max) { + effects.consume(code) + return value + } + + return nok(code) + } +} + +module.exports = characterReference diff --git 
a/node_modules/micromark/dist/tokenize/code-fenced.js b/node_modules/micromark/dist/tokenize/code-fenced.js new file mode 100644 index 00000000..16f88947 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/code-fenced.js @@ -0,0 +1,176 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var prefixSize = require('../util/prefix-size.js') +var factorySpace = require('./factory-space.js') + +var codeFenced = { + name: 'codeFenced', + tokenize: tokenizeCodeFenced, + concrete: true +} + +function tokenizeCodeFenced(effects, ok, nok) { + var self = this + var closingFenceConstruct = { + tokenize: tokenizeClosingFence, + partial: true + } + var initialPrefix = prefixSize(this.events, 'linePrefix') + var sizeOpen = 0 + var marker + return start + + function start(code) { + effects.enter('codeFenced') + effects.enter('codeFencedFence') + effects.enter('codeFencedFenceSequence') + marker = code + return sequenceOpen(code) + } + + function sequenceOpen(code) { + if (code === marker) { + effects.consume(code) + sizeOpen++ + return sequenceOpen + } + + effects.exit('codeFencedFenceSequence') + return sizeOpen < 3 + ? nok(code) + : factorySpace(effects, infoOpen, 'whitespace')(code) + } + + function infoOpen(code) { + if (code === null || markdownLineEnding(code)) { + return openAfter(code) + } + + effects.enter('codeFencedFenceInfo') + effects.enter('chunkString', { + contentType: 'string' + }) + return info(code) + } + + function info(code) { + if (code === null || markdownLineEndingOrSpace(code)) { + effects.exit('chunkString') + effects.exit('codeFencedFenceInfo') + return factorySpace(effects, infoAfter, 'whitespace')(code) + } + + if (code === 96 && code === marker) return nok(code) + effects.consume(code) + return info + } + + function infoAfter(code) { + if (code === null || markdownLineEnding(code)) { + return openAfter(code) + } + + effects.enter('codeFencedFenceMeta') + effects.enter('chunkString', { + contentType: 'string' + }) + return meta(code) + } + + function meta(code) { + if (code === null || markdownLineEnding(code)) { + effects.exit('chunkString') + effects.exit('codeFencedFenceMeta') + return openAfter(code) + } + + if (code === 96 && code === marker) return nok(code) + effects.consume(code) + return meta + } + + function openAfter(code) { + effects.exit('codeFencedFence') + return self.interrupt ? ok(code) : content(code) + } + + function content(code) { + if (code === null) { + return after(code) + } + + if (markdownLineEnding(code)) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return effects.attempt( + closingFenceConstruct, + after, + initialPrefix + ? factorySpace(effects, content, 'linePrefix', initialPrefix + 1) + : content + ) + } + + effects.enter('codeFlowValue') + return contentContinue(code) + } + + function contentContinue(code) { + if (code === null || markdownLineEnding(code)) { + effects.exit('codeFlowValue') + return content(code) + } + + effects.consume(code) + return contentContinue + } + + function after(code) { + effects.exit('codeFenced') + return ok(code) + } + + function tokenizeClosingFence(effects, ok, nok) { + var size = 0 + return factorySpace( + effects, + closingSequenceStart, + 'linePrefix', + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? 
undefined + : 4 + ) + + function closingSequenceStart(code) { + effects.enter('codeFencedFence') + effects.enter('codeFencedFenceSequence') + return closingSequence(code) + } + + function closingSequence(code) { + if (code === marker) { + effects.consume(code) + size++ + return closingSequence + } + + if (size < sizeOpen) return nok(code) + effects.exit('codeFencedFenceSequence') + return factorySpace(effects, closingSequenceEnd, 'whitespace')(code) + } + + function closingSequenceEnd(code) { + if (code === null || markdownLineEnding(code)) { + effects.exit('codeFencedFence') + return ok(code) + } + + return nok(code) + } + } +} + +module.exports = codeFenced diff --git a/node_modules/micromark/dist/tokenize/code-indented.js b/node_modules/micromark/dist/tokenize/code-indented.js new file mode 100644 index 00000000..604f094d --- /dev/null +++ b/node_modules/micromark/dist/tokenize/code-indented.js @@ -0,0 +1,72 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var chunkedSplice = require('../util/chunked-splice.js') +var prefixSize = require('../util/prefix-size.js') +var factorySpace = require('./factory-space.js') + +var codeIndented = { + name: 'codeIndented', + tokenize: tokenizeCodeIndented, + resolve: resolveCodeIndented +} +var indentedContentConstruct = { + tokenize: tokenizeIndentedContent, + partial: true +} + +function resolveCodeIndented(events, context) { + var code = { + type: 'codeIndented', + start: events[0][1].start, + end: events[events.length - 1][1].end + } + chunkedSplice(events, 0, 0, [['enter', code, context]]) + chunkedSplice(events, events.length, 0, [['exit', code, context]]) + return events +} + +function tokenizeCodeIndented(effects, ok, nok) { + return effects.attempt(indentedContentConstruct, afterPrefix, nok) + + function afterPrefix(code) { + if (code === null) { + return ok(code) + } + + if (markdownLineEnding(code)) { + return effects.attempt(indentedContentConstruct, afterPrefix, ok)(code) + } + + effects.enter('codeFlowValue') + return content(code) + } + + function content(code) { + if (code === null || markdownLineEnding(code)) { + effects.exit('codeFlowValue') + return afterPrefix(code) + } + + effects.consume(code) + return content + } +} + +function tokenizeIndentedContent(effects, ok, nok) { + var self = this + return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1) + + function afterPrefix(code) { + if (markdownLineEnding(code)) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1) + } + + return prefixSize(self.events, 'linePrefix') < 4 ? nok(code) : ok(code) + } +} + +module.exports = codeIndented diff --git a/node_modules/micromark/dist/tokenize/code-text.js b/node_modules/micromark/dist/tokenize/code-text.js new file mode 100644 index 00000000..d4a8fbe3 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/code-text.js @@ -0,0 +1,162 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') + +var codeText = { + name: 'codeText', + tokenize: tokenizeCodeText, + resolve: resolveCodeText, + previous: previous +} + +function resolveCodeText(events) { + var tailExitIndex = events.length - 4 + var headEnterIndex = 3 + var index + var enter // If we start and end with an EOL or a space. 
+ + if ( + (events[headEnterIndex][1].type === 'lineEnding' || + events[headEnterIndex][1].type === 'space') && + (events[tailExitIndex][1].type === 'lineEnding' || + events[tailExitIndex][1].type === 'space') + ) { + index = headEnterIndex // And we have data. + + while (++index < tailExitIndex) { + if (events[index][1].type === 'codeTextData') { + // Then we have padding. + events[tailExitIndex][1].type = events[headEnterIndex][1].type = + 'codeTextPadding' + headEnterIndex += 2 + tailExitIndex -= 2 + break + } + } + } // Merge adjacent spaces and data. + + index = headEnterIndex - 1 + tailExitIndex++ + + while (++index <= tailExitIndex) { + if (enter === undefined) { + if (index !== tailExitIndex && events[index][1].type !== 'lineEnding') { + enter = index + } + } else if ( + index === tailExitIndex || + events[index][1].type === 'lineEnding' + ) { + events[enter][1].type = 'codeTextData' + + if (index !== enter + 2) { + events[enter][1].end = events[index - 1][1].end + events.splice(enter + 2, index - enter - 2) + tailExitIndex -= index - enter - 2 + index = enter + 2 + } + + enter = undefined + } + } + + return events +} + +function previous(code) { + // If there is a previous code, there will always be a tail. + return ( + code !== 96 || + this.events[this.events.length - 1][1].type === 'characterEscape' + ) +} + +function tokenizeCodeText(effects, ok, nok) { + var sizeOpen = 0 + var size + var token + return start + + function start(code) { + effects.enter('codeText') + effects.enter('codeTextSequence') + return openingSequence(code) + } + + function openingSequence(code) { + if (code === 96) { + effects.consume(code) + sizeOpen++ + return openingSequence + } + + effects.exit('codeTextSequence') + return gap(code) + } + + function gap(code) { + // EOF. + if (code === null) { + return nok(code) + } // Closing fence? + // Could also be data. + + if (code === 96) { + token = effects.enter('codeTextSequence') + size = 0 + return closingSequence(code) + } // Tabs don’t work, and virtual spaces don’t make sense. + + if (code === 32) { + effects.enter('space') + effects.consume(code) + effects.exit('space') + return gap + } + + if (markdownLineEnding(code)) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return gap + } // Data. + + effects.enter('codeTextData') + return data(code) + } // In code. + + function data(code) { + if ( + code === null || + code === 32 || + code === 96 || + markdownLineEnding(code) + ) { + effects.exit('codeTextData') + return gap(code) + } + + effects.consume(code) + return data + } // Closing fence. + + function closingSequence(code) { + // More. + if (code === 96) { + effects.consume(code) + size++ + return closingSequence + } // Done! + + if (size === sizeOpen) { + effects.exit('codeTextSequence') + effects.exit('codeText') + return ok(code) + } // More or less accents: mark as data. + + token.type = 'codeTextData' + return data(code) + } +} + +module.exports = codeText diff --git a/node_modules/micromark/dist/tokenize/content.js b/node_modules/micromark/dist/tokenize/content.js new file mode 100644 index 00000000..e1a712eb --- /dev/null +++ b/node_modules/micromark/dist/tokenize/content.js @@ -0,0 +1,99 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var prefixSize = require('../util/prefix-size.js') +var subtokenize = require('../util/subtokenize.js') +var factorySpace = require('./factory-space.js') + +// No name because it must not be turned off. 
+var content = { + tokenize: tokenizeContent, + resolve: resolveContent, + interruptible: true, + lazy: true +} +var continuationConstruct = { + tokenize: tokenizeContinuation, + partial: true +} // Content is transparent: it’s parsed right now. That way, definitions are also +// parsed right now: before text in paragraphs (specifically, media) are parsed. + +function resolveContent(events) { + subtokenize(events) + return events +} + +function tokenizeContent(effects, ok) { + var previous + return start + + function start(code) { + effects.enter('content') + previous = effects.enter('chunkContent', { + contentType: 'content' + }) + return data(code) + } + + function data(code) { + if (code === null) { + return contentEnd(code) + } + + if (markdownLineEnding(code)) { + return effects.check( + continuationConstruct, + contentContinue, + contentEnd + )(code) + } // Data. + + effects.consume(code) + return data + } + + function contentEnd(code) { + effects.exit('chunkContent') + effects.exit('content') + return ok(code) + } + + function contentContinue(code) { + effects.consume(code) + effects.exit('chunkContent') + previous = previous.next = effects.enter('chunkContent', { + contentType: 'content', + previous: previous + }) + return data + } +} + +function tokenizeContinuation(effects, ok, nok) { + var self = this + return startLookahead + + function startLookahead(code) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return factorySpace(effects, prefixed, 'linePrefix') + } + + function prefixed(code) { + if (code === null || markdownLineEnding(code)) { + return nok(code) + } + + if ( + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 || + prefixSize(self.events, 'linePrefix') < 4 + ) { + return effects.interrupt(self.parser.constructs.flow, nok, ok)(code) + } + + return ok(code) + } +} + +module.exports = content diff --git a/node_modules/micromark/dist/tokenize/definition.js b/node_modules/micromark/dist/tokenize/definition.js new file mode 100644 index 00000000..21505d89 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/definition.js @@ -0,0 +1,115 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var normalizeIdentifier = require('../util/normalize-identifier.js') +var factoryDestination = require('./factory-destination.js') +var factoryLabel = require('./factory-label.js') +var factorySpace = require('./factory-space.js') +var factoryWhitespace = require('./factory-whitespace.js') +var factoryTitle = require('./factory-title.js') + +var definition = { + name: 'definition', + tokenize: tokenizeDefinition +} +var titleConstruct = { + tokenize: tokenizeTitle, + partial: true +} + +function tokenizeDefinition(effects, ok, nok) { + var self = this + var identifier + return start + + function start(code) { + effects.enter('definition') + return factoryLabel.call( + self, + effects, + labelAfter, + nok, + 'definitionLabel', + 'definitionLabelMarker', + 'definitionLabelString' + )(code) + } + + function labelAfter(code) { + identifier = normalizeIdentifier( + self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1) + ) + + if (code === 58) { + effects.enter('definitionMarker') + effects.consume(code) + effects.exit('definitionMarker') // Note: blank lines can’t exist in content. 
+ + return factoryWhitespace( + effects, + factoryDestination( + effects, + effects.attempt( + titleConstruct, + factorySpace(effects, after, 'whitespace'), + factorySpace(effects, after, 'whitespace') + ), + nok, + 'definitionDestination', + 'definitionDestinationLiteral', + 'definitionDestinationLiteralMarker', + 'definitionDestinationRaw', + 'definitionDestinationString' + ) + ) + } + + return nok(code) + } + + function after(code) { + if (code === null || markdownLineEnding(code)) { + effects.exit('definition') + + if (self.parser.defined.indexOf(identifier) < 0) { + self.parser.defined.push(identifier) + } + + return ok(code) + } + + return nok(code) + } +} + +function tokenizeTitle(effects, ok, nok) { + return start + + function start(code) { + return markdownLineEndingOrSpace(code) + ? factoryWhitespace(effects, before)(code) + : nok(code) + } + + function before(code) { + if (code === 34 || code === 39 || code === 40) { + return factoryTitle( + effects, + factorySpace(effects, after, 'whitespace'), + nok, + 'definitionTitle', + 'definitionTitleMarker', + 'definitionTitleString' + )(code) + } + + return nok(code) + } + + function after(code) { + return code === null || markdownLineEnding(code) ? ok(code) : nok(code) + } +} + +module.exports = definition diff --git a/node_modules/micromark/dist/tokenize/factory-destination.js b/node_modules/micromark/dist/tokenize/factory-destination.js new file mode 100644 index 00000000..1572025c --- /dev/null +++ b/node_modules/micromark/dist/tokenize/factory-destination.js @@ -0,0 +1,131 @@ +'use strict' + +var asciiControl = require('../character/ascii-control.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') + +// eslint-disable-next-line max-params +function destinationFactory( + effects, + ok, + nok, + type, + literalType, + literalMarkerType, + rawType, + stringType, + max +) { + var limit = max || Infinity + var balance = 0 + return start + + function start(code) { + if (code === 60) { + effects.enter(type) + effects.enter(literalType) + effects.enter(literalMarkerType) + effects.consume(code) + effects.exit(literalMarkerType) + return destinationEnclosedBefore + } + + if (asciiControl(code) || code === 41) { + return nok(code) + } + + effects.enter(type) + effects.enter(rawType) + effects.enter(stringType) + effects.enter('chunkString', { + contentType: 'string' + }) + return destinationRaw(code) + } + + function destinationEnclosedBefore(code) { + if (code === 62) { + effects.enter(literalMarkerType) + effects.consume(code) + effects.exit(literalMarkerType) + effects.exit(literalType) + effects.exit(type) + return ok + } + + effects.enter(stringType) + effects.enter('chunkString', { + contentType: 'string' + }) + return destinationEnclosed(code) + } + + function destinationEnclosed(code) { + if (code === 62) { + effects.exit('chunkString') + effects.exit(stringType) + return destinationEnclosedBefore(code) + } + + if (code === null || code === 60 || markdownLineEnding(code)) { + return nok(code) + } + + effects.consume(code) + return code === 92 ? 
destinationEnclosedEscape : destinationEnclosed + } + + function destinationEnclosedEscape(code) { + if (code === 60 || code === 62 || code === 92) { + effects.consume(code) + return destinationEnclosed + } + + return destinationEnclosed(code) + } + + function destinationRaw(code) { + if (code === 40) { + if (++balance > limit) return nok(code) + effects.consume(code) + return destinationRaw + } + + if (code === 41) { + if (!balance--) { + effects.exit('chunkString') + effects.exit(stringType) + effects.exit(rawType) + effects.exit(type) + return ok(code) + } + + effects.consume(code) + return destinationRaw + } + + if (code === null || markdownLineEndingOrSpace(code)) { + if (balance) return nok(code) + effects.exit('chunkString') + effects.exit(stringType) + effects.exit(rawType) + effects.exit(type) + return ok(code) + } + + if (asciiControl(code)) return nok(code) + effects.consume(code) + return code === 92 ? destinationRawEscape : destinationRaw + } + + function destinationRawEscape(code) { + if (code === 40 || code === 41 || code === 92) { + effects.consume(code) + return destinationRaw + } + + return destinationRaw(code) + } +} + +module.exports = destinationFactory diff --git a/node_modules/micromark/dist/tokenize/factory-label.js b/node_modules/micromark/dist/tokenize/factory-label.js new file mode 100644 index 00000000..500c95a8 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/factory-label.js @@ -0,0 +1,88 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownSpace = require('../character/markdown-space.js') + +// eslint-disable-next-line max-params +function labelFactory(effects, ok, nok, type, markerType, stringType) { + var self = this + var size = 0 + var data + return start + + function start(code) { + effects.enter(type) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.enter(stringType) + return atBreak + } + + function atBreak(code) { + if ( + code === null || + code === 91 || + (code === 93 && !data) || + /* c8 ignore next */ + (code === 94 && + /* c8 ignore next */ + !size && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs) || + size > 999 + ) { + return nok(code) + } + + if (code === 93) { + effects.exit(stringType) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.exit(type) + return ok + } + + if (markdownLineEnding(code)) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return atBreak + } + + effects.enter('chunkString', { + contentType: 'string' + }) + return label(code) + } + + function label(code) { + if ( + code === null || + code === 91 || + code === 93 || + markdownLineEnding(code) || + size++ > 999 + ) { + effects.exit('chunkString') + return atBreak(code) + } + + effects.consume(code) + data = data || !markdownSpace(code) + return code === 92 ? 
labelEscape : label + } + + function labelEscape(code) { + if (code === 91 || code === 92 || code === 93) { + effects.consume(code) + size++ + return label + } + + return label(code) + } +} + +module.exports = labelFactory diff --git a/node_modules/micromark/dist/tokenize/factory-space.js b/node_modules/micromark/dist/tokenize/factory-space.js new file mode 100644 index 00000000..b1026df9 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/factory-space.js @@ -0,0 +1,30 @@ +'use strict' + +var markdownSpace = require('../character/markdown-space.js') + +function spaceFactory(effects, ok, type, max) { + var limit = max ? max - 1 : Infinity + var size = 0 + return start + + function start(code) { + if (markdownSpace(code)) { + effects.enter(type) + return prefix(code) + } + + return ok(code) + } + + function prefix(code) { + if (markdownSpace(code) && size++ < limit) { + effects.consume(code) + return prefix + } + + effects.exit(type) + return ok(code) + } +} + +module.exports = spaceFactory diff --git a/node_modules/micromark/dist/tokenize/factory-title.js b/node_modules/micromark/dist/tokenize/factory-title.js new file mode 100644 index 00000000..6b3d0545 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/factory-title.js @@ -0,0 +1,75 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var factorySpace = require('./factory-space.js') + +function titleFactory(effects, ok, nok, type, markerType, stringType) { + var marker + return start + + function start(code) { + effects.enter(type) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + marker = code === 40 ? 41 : code + return atFirstTitleBreak + } + + function atFirstTitleBreak(code) { + if (code === marker) { + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.exit(type) + return ok + } + + effects.enter(stringType) + return atTitleBreak(code) + } + + function atTitleBreak(code) { + if (code === marker) { + effects.exit(stringType) + return atFirstTitleBreak(marker) + } + + if (code === null) { + return nok(code) + } // Note: blank lines can’t exist in content. + + if (markdownLineEnding(code)) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return factorySpace(effects, atTitleBreak, 'linePrefix') + } + + effects.enter('chunkString', { + contentType: 'string' + }) + return title(code) + } + + function title(code) { + if (code === marker || code === null || markdownLineEnding(code)) { + effects.exit('chunkString') + return atTitleBreak(code) + } + + effects.consume(code) + return code === 92 ? 
titleEscape : title + } + + function titleEscape(code) { + if (code === marker || code === 92) { + effects.consume(code) + return title + } + + return title(code) + } +} + +module.exports = titleFactory diff --git a/node_modules/micromark/dist/tokenize/factory-whitespace.js b/node_modules/micromark/dist/tokenize/factory-whitespace.js new file mode 100644 index 00000000..8141e961 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/factory-whitespace.js @@ -0,0 +1,32 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownSpace = require('../character/markdown-space.js') +var factorySpace = require('./factory-space.js') + +function whitespaceFactory(effects, ok) { + var seen + return start + + function start(code) { + if (markdownLineEnding(code)) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + seen = true + return start + } + + if (markdownSpace(code)) { + return factorySpace( + effects, + start, + seen ? 'linePrefix' : 'lineSuffix' + )(code) + } + + return ok(code) + } +} + +module.exports = whitespaceFactory diff --git a/node_modules/micromark/dist/tokenize/hard-break-escape.js b/node_modules/micromark/dist/tokenize/hard-break-escape.js new file mode 100644 index 00000000..bb49becb --- /dev/null +++ b/node_modules/micromark/dist/tokenize/hard-break-escape.js @@ -0,0 +1,31 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') + +var hardBreakEscape = { + name: 'hardBreakEscape', + tokenize: tokenizeHardBreakEscape +} + +function tokenizeHardBreakEscape(effects, ok, nok) { + return start + + function start(code) { + effects.enter('hardBreakEscape') + effects.enter('escapeMarker') + effects.consume(code) + return open + } + + function open(code) { + if (markdownLineEnding(code)) { + effects.exit('escapeMarker') + effects.exit('hardBreakEscape') + return ok(code) + } + + return nok(code) + } +} + +module.exports = hardBreakEscape diff --git a/node_modules/micromark/dist/tokenize/heading-atx.js b/node_modules/micromark/dist/tokenize/heading-atx.js new file mode 100644 index 00000000..8d8514ba --- /dev/null +++ b/node_modules/micromark/dist/tokenize/heading-atx.js @@ -0,0 +1,129 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownSpace = require('../character/markdown-space.js') +var chunkedSplice = require('../util/chunked-splice.js') +var factorySpace = require('./factory-space.js') + +var headingAtx = { + name: 'headingAtx', + tokenize: tokenizeHeadingAtx, + resolve: resolveHeadingAtx +} + +function resolveHeadingAtx(events, context) { + var contentEnd = events.length - 2 + var contentStart = 3 + var content + var text // Prefix whitespace, part of the opening. + + if (events[contentStart][1].type === 'whitespace') { + contentStart += 2 + } // Suffix whitespace, part of the closing. + + if ( + contentEnd - 2 > contentStart && + events[contentEnd][1].type === 'whitespace' + ) { + contentEnd -= 2 + } + + if ( + events[contentEnd][1].type === 'atxHeadingSequence' && + (contentStart === contentEnd - 1 || + (contentEnd - 4 > contentStart && + events[contentEnd - 2][1].type === 'whitespace')) + ) { + contentEnd -= contentStart + 1 === contentEnd ? 
2 : 4 + } + + if (contentEnd > contentStart) { + content = { + type: 'atxHeadingText', + start: events[contentStart][1].start, + end: events[contentEnd][1].end + } + text = { + type: 'chunkText', + start: events[contentStart][1].start, + end: events[contentEnd][1].end, + contentType: 'text' + } + chunkedSplice(events, contentStart, contentEnd - contentStart + 1, [ + ['enter', content, context], + ['enter', text, context], + ['exit', text, context], + ['exit', content, context] + ]) + } + + return events +} + +function tokenizeHeadingAtx(effects, ok, nok) { + var self = this + var size = 0 + return start + + function start(code) { + effects.enter('atxHeading') + effects.enter('atxHeadingSequence') + return fenceOpenInside(code) + } + + function fenceOpenInside(code) { + if (code === 35 && size++ < 6) { + effects.consume(code) + return fenceOpenInside + } + + if (code === null || markdownLineEndingOrSpace(code)) { + effects.exit('atxHeadingSequence') + return self.interrupt ? ok(code) : headingBreak(code) + } + + return nok(code) + } + + function headingBreak(code) { + if (code === 35) { + effects.enter('atxHeadingSequence') + return sequence(code) + } + + if (code === null || markdownLineEnding(code)) { + effects.exit('atxHeading') + return ok(code) + } + + if (markdownSpace(code)) { + return factorySpace(effects, headingBreak, 'whitespace')(code) + } + + effects.enter('atxHeadingText') + return data(code) + } + + function sequence(code) { + if (code === 35) { + effects.consume(code) + return sequence + } + + effects.exit('atxHeadingSequence') + return headingBreak(code) + } + + function data(code) { + if (code === null || code === 35 || markdownLineEndingOrSpace(code)) { + effects.exit('atxHeadingText') + return headingBreak(code) + } + + effects.consume(code) + return data + } +} + +module.exports = headingAtx diff --git a/node_modules/micromark/dist/tokenize/html-flow.js b/node_modules/micromark/dist/tokenize/html-flow.js new file mode 100644 index 00000000..dc604bf7 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/html-flow.js @@ -0,0 +1,486 @@ +'use strict' + +var asciiAlpha = require('../character/ascii-alpha.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownSpace = require('../character/markdown-space.js') +var fromCharCode = require('../constant/from-char-code.js') +var htmlBlockNames = require('../constant/html-block-names.js') +var htmlRawNames = require('../constant/html-raw-names.js') +var partialBlankLine = require('./partial-blank-line.js') + +var htmlFlow = { + name: 'htmlFlow', + tokenize: tokenizeHtmlFlow, + resolveTo: resolveToHtmlFlow, + concrete: true +} +var nextBlankConstruct = { + tokenize: tokenizeNextBlank, + partial: true +} + +function resolveToHtmlFlow(events) { + var index = events.length + + while (index--) { + if (events[index][0] === 'enter' && events[index][1].type === 'htmlFlow') { + break + } + } + + if (index > 1 && events[index - 2][1].type === 'linePrefix') { + // Add the prefix start to the HTML token. + events[index][1].start = events[index - 2][1].start // Add the prefix start to the HTML line token. + + events[index + 1][1].start = events[index - 2][1].start // Remove the line prefix. 
+ + events.splice(index - 2, 2) + } + + return events +} + +function tokenizeHtmlFlow(effects, ok, nok) { + var self = this + var kind + var startTag + var buffer + var index + var marker + return start + + function start(code) { + effects.enter('htmlFlow') + effects.enter('htmlFlowData') + effects.consume(code) + return open + } + + function open(code) { + if (code === 33) { + effects.consume(code) + return declarationStart + } + + if (code === 47) { + effects.consume(code) + return tagCloseStart + } + + if (code === 63) { + effects.consume(code) + kind = 3 // While we’re in an instruction instead of a declaration, we’re on a `?` + // right now, so we do need to search for `>`, similar to declarations. + + return self.interrupt ? ok : continuationDeclarationInside + } + + if (asciiAlpha(code)) { + effects.consume(code) + buffer = fromCharCode(code) + startTag = true + return tagName + } + + return nok(code) + } + + function declarationStart(code) { + if (code === 45) { + effects.consume(code) + kind = 2 + return commentOpenInside + } + + if (code === 91) { + effects.consume(code) + kind = 5 + buffer = 'CDATA[' + index = 0 + return cdataOpenInside + } + + if (asciiAlpha(code)) { + effects.consume(code) + kind = 4 + return self.interrupt ? ok : continuationDeclarationInside + } + + return nok(code) + } + + function commentOpenInside(code) { + if (code === 45) { + effects.consume(code) + return self.interrupt ? ok : continuationDeclarationInside + } + + return nok(code) + } + + function cdataOpenInside(code) { + if (code === buffer.charCodeAt(index++)) { + effects.consume(code) + return index === buffer.length + ? self.interrupt + ? ok + : continuation + : cdataOpenInside + } + + return nok(code) + } + + function tagCloseStart(code) { + if (asciiAlpha(code)) { + effects.consume(code) + buffer = fromCharCode(code) + return tagName + } + + return nok(code) + } + + function tagName(code) { + if ( + code === null || + code === 47 || + code === 62 || + markdownLineEndingOrSpace(code) + ) { + if ( + code !== 47 && + startTag && + htmlRawNames.indexOf(buffer.toLowerCase()) > -1 + ) { + kind = 1 + return self.interrupt ? ok(code) : continuation(code) + } + + if (htmlBlockNames.indexOf(buffer.toLowerCase()) > -1) { + kind = 6 + + if (code === 47) { + effects.consume(code) + return basicSelfClosing + } + + return self.interrupt ? ok(code) : continuation(code) + } + + kind = 7 // Do not support complete HTML when interrupting. + + return self.interrupt + ? nok(code) + : startTag + ? completeAttributeNameBefore(code) + : completeClosingTagAfter(code) + } + + if (code === 45 || asciiAlphanumeric(code)) { + effects.consume(code) + buffer += fromCharCode(code) + return tagName + } + + return nok(code) + } + + function basicSelfClosing(code) { + if (code === 62) { + effects.consume(code) + return self.interrupt ? 
ok : continuation + } + + return nok(code) + } + + function completeClosingTagAfter(code) { + if (markdownSpace(code)) { + effects.consume(code) + return completeClosingTagAfter + } + + return completeEnd(code) + } + + function completeAttributeNameBefore(code) { + if (code === 47) { + effects.consume(code) + return completeEnd + } + + if (code === 58 || code === 95 || asciiAlpha(code)) { + effects.consume(code) + return completeAttributeName + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeNameBefore + } + + return completeEnd(code) + } + + function completeAttributeName(code) { + if ( + code === 45 || + code === 46 || + code === 58 || + code === 95 || + asciiAlphanumeric(code) + ) { + effects.consume(code) + return completeAttributeName + } + + return completeAttributeNameAfter(code) + } + + function completeAttributeNameAfter(code) { + if (code === 61) { + effects.consume(code) + return completeAttributeValueBefore + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeNameAfter + } + + return completeAttributeNameBefore(code) + } + + function completeAttributeValueBefore(code) { + if ( + code === null || + code === 60 || + code === 61 || + code === 62 || + code === 96 + ) { + return nok(code) + } + + if (code === 34 || code === 39) { + effects.consume(code) + marker = code + return completeAttributeValueQuoted + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeValueBefore + } + + marker = undefined + return completeAttributeValueUnquoted(code) + } + + function completeAttributeValueQuoted(code) { + if (code === marker) { + effects.consume(code) + return completeAttributeValueQuotedAfter + } + + if (code === null || markdownLineEnding(code)) { + return nok(code) + } + + effects.consume(code) + return completeAttributeValueQuoted + } + + function completeAttributeValueUnquoted(code) { + if ( + code === null || + code === 34 || + code === 39 || + code === 60 || + code === 61 || + code === 62 || + code === 96 || + markdownLineEndingOrSpace(code) + ) { + return completeAttributeNameAfter(code) + } + + effects.consume(code) + return completeAttributeValueUnquoted + } + + function completeAttributeValueQuotedAfter(code) { + if (code === 47 || code === 62 || markdownSpace(code)) { + return completeAttributeNameBefore(code) + } + + return nok(code) + } + + function completeEnd(code) { + if (code === 62) { + effects.consume(code) + return completeAfter + } + + return nok(code) + } + + function completeAfter(code) { + if (markdownSpace(code)) { + effects.consume(code) + return completeAfter + } + + return code === null || markdownLineEnding(code) + ? 
continuation(code) + : nok(code) + } + + function continuation(code) { + if (code === 45 && kind === 2) { + effects.consume(code) + return continuationCommentInside + } + + if (code === 60 && kind === 1) { + effects.consume(code) + return continuationRawTagOpen + } + + if (code === 62 && kind === 4) { + effects.consume(code) + return continuationClose + } + + if (code === 63 && kind === 3) { + effects.consume(code) + return continuationDeclarationInside + } + + if (code === 93 && kind === 5) { + effects.consume(code) + return continuationCharacterDataInside + } + + if (markdownLineEnding(code) && (kind === 6 || kind === 7)) { + return effects.check( + nextBlankConstruct, + continuationClose, + continuationAtLineEnding + )(code) + } + + if (code === null || markdownLineEnding(code)) { + return continuationAtLineEnding(code) + } + + effects.consume(code) + return continuation + } + + function continuationAtLineEnding(code) { + effects.exit('htmlFlowData') + return htmlContinueStart(code) + } + + function htmlContinueStart(code) { + if (code === null) { + return done(code) + } + + if (markdownLineEnding(code)) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return htmlContinueStart + } + + effects.enter('htmlFlowData') + return continuation(code) + } + + function continuationCommentInside(code) { + if (code === 45) { + effects.consume(code) + return continuationDeclarationInside + } + + return continuation(code) + } + + function continuationRawTagOpen(code) { + if (code === 47) { + effects.consume(code) + buffer = '' + return continuationRawEndTag + } + + return continuation(code) + } + + function continuationRawEndTag(code) { + if (code === 62 && htmlRawNames.indexOf(buffer.toLowerCase()) > -1) { + effects.consume(code) + return continuationClose + } + + if (asciiAlpha(code) && buffer.length < 8) { + effects.consume(code) + buffer += fromCharCode(code) + return continuationRawEndTag + } + + return continuation(code) + } + + function continuationCharacterDataInside(code) { + if (code === 93) { + effects.consume(code) + return continuationDeclarationInside + } + + return continuation(code) + } + + function continuationDeclarationInside(code) { + if (code === 62) { + effects.consume(code) + return continuationClose + } + + return continuation(code) + } + + function continuationClose(code) { + if (code === null || markdownLineEnding(code)) { + effects.exit('htmlFlowData') + return done(code) + } + + effects.consume(code) + return continuationClose + } + + function done(code) { + effects.exit('htmlFlow') + return ok(code) + } +} + +function tokenizeNextBlank(effects, ok, nok) { + return start + + function start(code) { + effects.exit('htmlFlowData') + effects.enter('lineEndingBlank') + effects.consume(code) + effects.exit('lineEndingBlank') + return effects.attempt(partialBlankLine, ok, nok) + } +} + +module.exports = htmlFlow diff --git a/node_modules/micromark/dist/tokenize/html-text.js b/node_modules/micromark/dist/tokenize/html-text.js new file mode 100644 index 00000000..92d1eeec --- /dev/null +++ b/node_modules/micromark/dist/tokenize/html-text.js @@ -0,0 +1,435 @@ +'use strict' + +var asciiAlpha = require('../character/ascii-alpha.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownSpace = require('../character/markdown-space.js') +var factorySpace = 
require('./factory-space.js') + +var htmlText = { + name: 'htmlText', + tokenize: tokenizeHtmlText +} + +function tokenizeHtmlText(effects, ok, nok) { + var self = this + var marker + var buffer + var index + var returnState + return start + + function start(code) { + effects.enter('htmlText') + effects.enter('htmlTextData') + effects.consume(code) + return open + } + + function open(code) { + if (code === 33) { + effects.consume(code) + return declarationOpen + } + + if (code === 47) { + effects.consume(code) + return tagCloseStart + } + + if (code === 63) { + effects.consume(code) + return instruction + } + + if (asciiAlpha(code)) { + effects.consume(code) + return tagOpen + } + + return nok(code) + } + + function declarationOpen(code) { + if (code === 45) { + effects.consume(code) + return commentOpen + } + + if (code === 91) { + effects.consume(code) + buffer = 'CDATA[' + index = 0 + return cdataOpen + } + + if (asciiAlpha(code)) { + effects.consume(code) + return declaration + } + + return nok(code) + } + + function commentOpen(code) { + if (code === 45) { + effects.consume(code) + return commentStart + } + + return nok(code) + } + + function commentStart(code) { + if (code === null || code === 62) { + return nok(code) + } + + if (code === 45) { + effects.consume(code) + return commentStartDash + } + + return comment(code) + } + + function commentStartDash(code) { + if (code === null || code === 62) { + return nok(code) + } + + return comment(code) + } + + function comment(code) { + if (code === null) { + return nok(code) + } + + if (code === 45) { + effects.consume(code) + return commentClose + } + + if (markdownLineEnding(code)) { + returnState = comment + return atLineEnding(code) + } + + effects.consume(code) + return comment + } + + function commentClose(code) { + if (code === 45) { + effects.consume(code) + return end + } + + return comment(code) + } + + function cdataOpen(code) { + if (code === buffer.charCodeAt(index++)) { + effects.consume(code) + return index === buffer.length ? cdata : cdataOpen + } + + return nok(code) + } + + function cdata(code) { + if (code === null) { + return nok(code) + } + + if (code === 93) { + effects.consume(code) + return cdataClose + } + + if (markdownLineEnding(code)) { + returnState = cdata + return atLineEnding(code) + } + + effects.consume(code) + return cdata + } + + function cdataClose(code) { + if (code === 93) { + effects.consume(code) + return cdataEnd + } + + return cdata(code) + } + + function cdataEnd(code) { + if (code === 62) { + return end(code) + } + + if (code === 93) { + effects.consume(code) + return cdataEnd + } + + return cdata(code) + } + + function declaration(code) { + if (code === null || code === 62) { + return end(code) + } + + if (markdownLineEnding(code)) { + returnState = declaration + return atLineEnding(code) + } + + effects.consume(code) + return declaration + } + + function instruction(code) { + if (code === null) { + return nok(code) + } + + if (code === 63) { + effects.consume(code) + return instructionClose + } + + if (markdownLineEnding(code)) { + returnState = instruction + return atLineEnding(code) + } + + effects.consume(code) + return instruction + } + + function instructionClose(code) { + return code === 62 ? 
end(code) : instruction(code) + } + + function tagCloseStart(code) { + if (asciiAlpha(code)) { + effects.consume(code) + return tagClose + } + + return nok(code) + } + + function tagClose(code) { + if (code === 45 || asciiAlphanumeric(code)) { + effects.consume(code) + return tagClose + } + + return tagCloseBetween(code) + } + + function tagCloseBetween(code) { + if (markdownLineEnding(code)) { + returnState = tagCloseBetween + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagCloseBetween + } + + return end(code) + } + + function tagOpen(code) { + if (code === 45 || asciiAlphanumeric(code)) { + effects.consume(code) + return tagOpen + } + + if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) { + return tagOpenBetween(code) + } + + return nok(code) + } + + function tagOpenBetween(code) { + if (code === 47) { + effects.consume(code) + return end + } + + if (code === 58 || code === 95 || asciiAlpha(code)) { + effects.consume(code) + return tagOpenAttributeName + } + + if (markdownLineEnding(code)) { + returnState = tagOpenBetween + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenBetween + } + + return end(code) + } + + function tagOpenAttributeName(code) { + if ( + code === 45 || + code === 46 || + code === 58 || + code === 95 || + asciiAlphanumeric(code) + ) { + effects.consume(code) + return tagOpenAttributeName + } + + return tagOpenAttributeNameAfter(code) + } + + function tagOpenAttributeNameAfter(code) { + if (code === 61) { + effects.consume(code) + return tagOpenAttributeValueBefore + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeNameAfter + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenAttributeNameAfter + } + + return tagOpenBetween(code) + } + + function tagOpenAttributeValueBefore(code) { + if ( + code === null || + code === 60 || + code === 61 || + code === 62 || + code === 96 + ) { + return nok(code) + } + + if (code === 34 || code === 39) { + effects.consume(code) + marker = code + return tagOpenAttributeValueQuoted + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeValueBefore + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenAttributeValueBefore + } + + effects.consume(code) + marker = undefined + return tagOpenAttributeValueUnquoted + } + + function tagOpenAttributeValueQuoted(code) { + if (code === marker) { + effects.consume(code) + return tagOpenAttributeValueQuotedAfter + } + + if (code === null) { + return nok(code) + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeValueQuoted + return atLineEnding(code) + } + + effects.consume(code) + return tagOpenAttributeValueQuoted + } + + function tagOpenAttributeValueQuotedAfter(code) { + if (code === 62 || code === 47 || markdownLineEndingOrSpace(code)) { + return tagOpenBetween(code) + } + + return nok(code) + } + + function tagOpenAttributeValueUnquoted(code) { + if ( + code === null || + code === 34 || + code === 39 || + code === 60 || + code === 61 || + code === 96 + ) { + return nok(code) + } + + if (code === 62 || markdownLineEndingOrSpace(code)) { + return tagOpenBetween(code) + } + + effects.consume(code) + return tagOpenAttributeValueUnquoted + } // We can’t have blank lines in content, so no need to worry about empty + // tokens. 
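// Illustrative walk-through (a sketch, not text from the micromark source):
// each of these states receives a single character code and returns the next
// state function, with `effects.consume` advancing one character per step.
// Under that assumption, the inline HTML `<a href="x">` is parsed roughly as
//
//   open -> tagOpen -> tagOpenBetween -> tagOpenAttributeName ->
//   tagOpenAttributeNameAfter -> tagOpenAttributeValueBefore ->
//   tagOpenAttributeValueQuoted -> tagOpenAttributeValueQuotedAfter ->
//   tagOpenBetween -> end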
+ + function atLineEnding(code) { + effects.exit('htmlTextData') + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return factorySpace( + effects, + afterPrefix, + 'linePrefix', + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : 4 + ) + } + + function afterPrefix(code) { + effects.enter('htmlTextData') + return returnState(code) + } + + function end(code) { + if (code === 62) { + effects.consume(code) + effects.exit('htmlTextData') + effects.exit('htmlText') + return ok + } + + return nok(code) + } +} + +module.exports = htmlText diff --git a/node_modules/micromark/dist/tokenize/label-end.js b/node_modules/micromark/dist/tokenize/label-end.js new file mode 100644 index 00000000..9e8ffce8 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/label-end.js @@ -0,0 +1,330 @@ +'use strict' + +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var chunkedPush = require('../util/chunked-push.js') +var chunkedSplice = require('../util/chunked-splice.js') +var normalizeIdentifier = require('../util/normalize-identifier.js') +var resolveAll = require('../util/resolve-all.js') +var shallow = require('../util/shallow.js') +var factoryDestination = require('./factory-destination.js') +var factoryLabel = require('./factory-label.js') +var factoryTitle = require('./factory-title.js') +var factoryWhitespace = require('./factory-whitespace.js') + +var labelEnd = { + name: 'labelEnd', + tokenize: tokenizeLabelEnd, + resolveTo: resolveToLabelEnd, + resolveAll: resolveAllLabelEnd +} +var resourceConstruct = { + tokenize: tokenizeResource +} +var fullReferenceConstruct = { + tokenize: tokenizeFullReference +} +var collapsedReferenceConstruct = { + tokenize: tokenizeCollapsedReference +} + +function resolveAllLabelEnd(events) { + var index = -1 + var token + + while (++index < events.length) { + token = events[index][1] + + if ( + !token._used && + (token.type === 'labelImage' || + token.type === 'labelLink' || + token.type === 'labelEnd') + ) { + // Remove the marker. + events.splice(index + 1, token.type === 'labelImage' ? 4 : 2) + token.type = 'data' + index++ + } + } + + return events +} + +function resolveToLabelEnd(events, context) { + var index = events.length + var offset = 0 + var group + var label + var text + var token + var open + var close + var media // Find an opening. + + while (index--) { + token = events[index][1] + + if (open) { + // If we see another link, or inactive link label, we’ve been here before. + if ( + token.type === 'link' || + (token.type === 'labelLink' && token._inactive) + ) { + break + } // Mark other link openings as inactive, as we can’t have links in + // links. + + if (events[index][0] === 'enter' && token.type === 'labelLink') { + token._inactive = true + } + } else if (close) { + if ( + events[index][0] === 'enter' && + (token.type === 'labelImage' || token.type === 'labelLink') && + !token._balanced + ) { + open = index + + if (token.type !== 'labelLink') { + offset = 2 + break + } + } + } else if (token.type === 'labelEnd') { + close = index + } + } + + group = { + type: events[open][1].type === 'labelLink' ? 
'link' : 'image', + start: shallow(events[open][1].start), + end: shallow(events[events.length - 1][1].end) + } + label = { + type: 'label', + start: shallow(events[open][1].start), + end: shallow(events[close][1].end) + } + text = { + type: 'labelText', + start: shallow(events[open + offset + 2][1].end), + end: shallow(events[close - 2][1].start) + } + media = [ + ['enter', group, context], + ['enter', label, context] + ] // Opening marker. + + media = chunkedPush(media, events.slice(open + 1, open + offset + 3)) // Text open. + + media = chunkedPush(media, [['enter', text, context]]) // Between. + + media = chunkedPush( + media, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(open + offset + 4, close - 3), + context + ) + ) // Text close, marker close, label close. + + media = chunkedPush(media, [ + ['exit', text, context], + events[close - 2], + events[close - 1], + ['exit', label, context] + ]) // Reference, resource, or so. + + media = chunkedPush(media, events.slice(close + 1)) // Media close. + + media = chunkedPush(media, [['exit', group, context]]) + chunkedSplice(events, open, events.length, media) + return events +} + +function tokenizeLabelEnd(effects, ok, nok) { + var self = this + var index = self.events.length + var labelStart + var defined // Find an opening. + + while (index--) { + if ( + (self.events[index][1].type === 'labelImage' || + self.events[index][1].type === 'labelLink') && + !self.events[index][1]._balanced + ) { + labelStart = self.events[index][1] + break + } + } + + return start + + function start(code) { + if (!labelStart) { + return nok(code) + } // It’s a balanced bracket, but contains a link. + + if (labelStart._inactive) return balanced(code) + defined = + self.parser.defined.indexOf( + normalizeIdentifier( + self.sliceSerialize({ + start: labelStart.end, + end: self.now() + }) + ) + ) > -1 + effects.enter('labelEnd') + effects.enter('labelMarker') + effects.consume(code) + effects.exit('labelMarker') + effects.exit('labelEnd') + return afterLabelEnd + } + + function afterLabelEnd(code) { + // Resource: `[asd](fgh)`. + if (code === 40) { + return effects.attempt( + resourceConstruct, + ok, + defined ? ok : balanced + )(code) + } // Collapsed (`[asd][]`) or full (`[asd][fgh]`) reference? + + if (code === 91) { + return effects.attempt( + fullReferenceConstruct, + ok, + defined + ? effects.attempt(collapsedReferenceConstruct, ok, balanced) + : balanced + )(code) + } // Shortcut reference: `[asd]`? + + return defined ? ok(code) : balanced(code) + } + + function balanced(code) { + labelStart._balanced = true + return nok(code) + } +} + +function tokenizeResource(effects, ok, nok) { + return start + + function start(code) { + effects.enter('resource') + effects.enter('resourceMarker') + effects.consume(code) + effects.exit('resourceMarker') + return factoryWhitespace(effects, open) + } + + function open(code) { + if (code === 41) { + return end(code) + } + + return factoryDestination( + effects, + destinationAfter, + nok, + 'resourceDestination', + 'resourceDestinationLiteral', + 'resourceDestinationLiteralMarker', + 'resourceDestinationRaw', + 'resourceDestinationString', + 3 + )(code) + } + + function destinationAfter(code) { + return markdownLineEndingOrSpace(code) + ? 
factoryWhitespace(effects, between)(code) + : end(code) + } + + function between(code) { + if (code === 34 || code === 39 || code === 40) { + return factoryTitle( + effects, + factoryWhitespace(effects, end), + nok, + 'resourceTitle', + 'resourceTitleMarker', + 'resourceTitleString' + )(code) + } + + return end(code) + } + + function end(code) { + if (code === 41) { + effects.enter('resourceMarker') + effects.consume(code) + effects.exit('resourceMarker') + effects.exit('resource') + return ok + } + + return nok(code) + } +} + +function tokenizeFullReference(effects, ok, nok) { + var self = this + return start + + function start(code) { + return factoryLabel.call( + self, + effects, + afterLabel, + nok, + 'reference', + 'referenceMarker', + 'referenceString' + )(code) + } + + function afterLabel(code) { + return self.parser.defined.indexOf( + normalizeIdentifier( + self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1) + ) + ) < 0 + ? nok(code) + : ok(code) + } +} + +function tokenizeCollapsedReference(effects, ok, nok) { + return start + + function start(code) { + effects.enter('reference') + effects.enter('referenceMarker') + effects.consume(code) + effects.exit('referenceMarker') + return open + } + + function open(code) { + if (code === 93) { + effects.enter('referenceMarker') + effects.consume(code) + effects.exit('referenceMarker') + effects.exit('reference') + return ok + } + + return nok(code) + } +} + +module.exports = labelEnd diff --git a/node_modules/micromark/dist/tokenize/label-start-image.js b/node_modules/micromark/dist/tokenize/label-start-image.js new file mode 100644 index 00000000..90bc3d90 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/label-start-image.js @@ -0,0 +1,46 @@ +'use strict' + +var labelEnd = require('./label-end.js') + +var labelStartImage = { + name: 'labelStartImage', + tokenize: tokenizeLabelStartImage, + resolveAll: labelEnd.resolveAll +} + +function tokenizeLabelStartImage(effects, ok, nok) { + var self = this + return start + + function start(code) { + effects.enter('labelImage') + effects.enter('labelImageMarker') + effects.consume(code) + effects.exit('labelImageMarker') + return open + } + + function open(code) { + if (code === 91) { + effects.enter('labelMarker') + effects.consume(code) + effects.exit('labelMarker') + effects.exit('labelImage') + return after + } + + return nok(code) + } + + function after(code) { + /* c8 ignore next */ + return code === 94 && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs + ? /* c8 ignore next */ + nok(code) + : ok(code) + } +} + +module.exports = labelStartImage diff --git a/node_modules/micromark/dist/tokenize/label-start-link.js b/node_modules/micromark/dist/tokenize/label-start-link.js new file mode 100644 index 00000000..22942059 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/label-start-link.js @@ -0,0 +1,35 @@ +'use strict' + +var labelEnd = require('./label-end.js') + +var labelStartLink = { + name: 'labelStartLink', + tokenize: tokenizeLabelStartLink, + resolveAll: labelEnd.resolveAll +} + +function tokenizeLabelStartLink(effects, ok, nok) { + var self = this + return start + + function start(code) { + effects.enter('labelLink') + effects.enter('labelMarker') + effects.consume(code) + effects.exit('labelMarker') + effects.exit('labelLink') + return after + } + + function after(code) { + /* c8 ignore next */ + return code === 94 && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs + ? 
/* c8 ignore next */ + nok(code) + : ok(code) + } +} + +module.exports = labelStartLink diff --git a/node_modules/micromark/dist/tokenize/line-ending.js b/node_modules/micromark/dist/tokenize/line-ending.js new file mode 100644 index 00000000..d381f6dc --- /dev/null +++ b/node_modules/micromark/dist/tokenize/line-ending.js @@ -0,0 +1,21 @@ +'use strict' + +var factorySpace = require('./factory-space.js') + +var lineEnding = { + name: 'lineEnding', + tokenize: tokenizeLineEnding +} + +function tokenizeLineEnding(effects, ok) { + return start + + function start(code) { + effects.enter('lineEnding') + effects.consume(code) + effects.exit('lineEnding') + return factorySpace(effects, ok, 'linePrefix') + } +} + +module.exports = lineEnding diff --git a/node_modules/micromark/dist/tokenize/list.js b/node_modules/micromark/dist/tokenize/list.js new file mode 100644 index 00000000..21f14c37 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/list.js @@ -0,0 +1,214 @@ +'use strict' + +var asciiDigit = require('../character/ascii-digit.js') +var markdownSpace = require('../character/markdown-space.js') +var prefixSize = require('../util/prefix-size.js') +var sizeChunks = require('../util/size-chunks.js') +var factorySpace = require('./factory-space.js') +var partialBlankLine = require('./partial-blank-line.js') +var thematicBreak = require('./thematic-break.js') + +var list = { + name: 'list', + tokenize: tokenizeListStart, + continuation: { + tokenize: tokenizeListContinuation + }, + exit: tokenizeListEnd +} +var listItemPrefixWhitespaceConstruct = { + tokenize: tokenizeListItemPrefixWhitespace, + partial: true +} +var indentConstruct = { + tokenize: tokenizeIndent, + partial: true +} + +function tokenizeListStart(effects, ok, nok) { + var self = this + var initialSize = prefixSize(self.events, 'linePrefix') + var size = 0 + return start + + function start(code) { + var kind = + self.containerState.type || + (code === 42 || code === 43 || code === 45 + ? 'listUnordered' + : 'listOrdered') + + if ( + kind === 'listUnordered' + ? !self.containerState.marker || code === self.containerState.marker + : asciiDigit(code) + ) { + if (!self.containerState.type) { + self.containerState.type = kind + effects.enter(kind, { + _container: true + }) + } + + if (kind === 'listUnordered') { + effects.enter('listItemPrefix') + return code === 42 || code === 45 + ? effects.check(thematicBreak, nok, atMarker)(code) + : atMarker(code) + } + + if (!self.interrupt || code === 49) { + effects.enter('listItemPrefix') + effects.enter('listItemValue') + return inside(code) + } + } + + return nok(code) + } + + function inside(code) { + if (asciiDigit(code) && ++size < 10) { + effects.consume(code) + return inside + } + + if ( + (!self.interrupt || size < 2) && + (self.containerState.marker + ? code === self.containerState.marker + : code === 41 || code === 46) + ) { + effects.exit('listItemValue') + return atMarker(code) + } + + return nok(code) + } + + function atMarker(code) { + effects.enter('listItemMarker') + effects.consume(code) + effects.exit('listItemMarker') + self.containerState.marker = self.containerState.marker || code + return effects.check( + partialBlankLine, // Can’t be empty when interrupting. + self.interrupt ? 
nok : onBlank, + effects.attempt( + listItemPrefixWhitespaceConstruct, + endOfPrefix, + otherPrefix + ) + ) + } + + function onBlank(code) { + self.containerState.initialBlankLine = true + initialSize++ + return endOfPrefix(code) + } + + function otherPrefix(code) { + if (markdownSpace(code)) { + effects.enter('listItemPrefixWhitespace') + effects.consume(code) + effects.exit('listItemPrefixWhitespace') + return endOfPrefix + } + + return nok(code) + } + + function endOfPrefix(code) { + self.containerState.size = + initialSize + sizeChunks(self.sliceStream(effects.exit('listItemPrefix'))) + return ok(code) + } +} + +function tokenizeListContinuation(effects, ok, nok) { + var self = this + self.containerState._closeFlow = undefined + return effects.check(partialBlankLine, onBlank, notBlank) + + function onBlank(code) { + self.containerState.furtherBlankLines = + self.containerState.furtherBlankLines || + self.containerState.initialBlankLine // We have a blank line. + // Still, try to consume at most the items size. + + return factorySpace( + effects, + ok, + 'listItemIndent', + self.containerState.size + 1 + )(code) + } + + function notBlank(code) { + if (self.containerState.furtherBlankLines || !markdownSpace(code)) { + self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined + return notInCurrentItem(code) + } + + self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined + return effects.attempt(indentConstruct, ok, notInCurrentItem)(code) + } + + function notInCurrentItem(code) { + // While we do continue, we signal that the flow should be closed. + self.containerState._closeFlow = true // As we’re closing flow, we’re no longer interrupting. + + self.interrupt = undefined + return factorySpace( + effects, + effects.attempt(list, ok, nok), + 'linePrefix', + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : 4 + )(code) + } +} + +function tokenizeIndent(effects, ok, nok) { + var self = this + return factorySpace( + effects, + afterPrefix, + 'listItemIndent', + self.containerState.size + 1 + ) + + function afterPrefix(code) { + return prefixSize(self.events, 'listItemIndent') === + self.containerState.size + ? ok(code) + : nok(code) + } +} + +function tokenizeListEnd(effects) { + effects.exit(this.containerState.type) +} + +function tokenizeListItemPrefixWhitespace(effects, ok, nok) { + var self = this + return factorySpace( + effects, + afterPrefix, + 'listItemPrefixWhitespace', + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : 4 + 1 + ) + + function afterPrefix(code) { + return markdownSpace(code) || + !prefixSize(self.events, 'listItemPrefixWhitespace') + ? nok(code) + : ok(code) + } +} + +module.exports = list diff --git a/node_modules/micromark/dist/tokenize/partial-blank-line.js b/node_modules/micromark/dist/tokenize/partial-blank-line.js new file mode 100644 index 00000000..b5207df2 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/partial-blank-line.js @@ -0,0 +1,19 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var factorySpace = require('./factory-space.js') + +var partialBlankLine = { + tokenize: tokenizePartialBlankLine, + partial: true +} + +function tokenizePartialBlankLine(effects, ok, nok) { + return factorySpace(effects, afterWhitespace, 'linePrefix') + + function afterWhitespace(code) { + return code === null || markdownLineEnding(code) ? 
ok(code) : nok(code) + } +} + +module.exports = partialBlankLine diff --git a/node_modules/micromark/dist/tokenize/setext-underline.js b/node_modules/micromark/dist/tokenize/setext-underline.js new file mode 100644 index 00000000..4f277070 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/setext-underline.js @@ -0,0 +1,117 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var shallow = require('../util/shallow.js') +var factorySpace = require('./factory-space.js') + +var setextUnderline = { + name: 'setextUnderline', + tokenize: tokenizeSetextUnderline, + resolveTo: resolveToSetextUnderline +} + +function resolveToSetextUnderline(events, context) { + var index = events.length + var content + var text + var definition + var heading // Find the opening of the content. + // It’ll always exist: we don’t tokenize if it isn’t there. + + while (index--) { + if (events[index][0] === 'enter') { + if (events[index][1].type === 'content') { + content = index + break + } + + if (events[index][1].type === 'paragraph') { + text = index + } + } // Exit + else { + if (events[index][1].type === 'content') { + // Remove the content end (if needed we’ll add it later) + events.splice(index, 1) + } + + if (!definition && events[index][1].type === 'definition') { + definition = index + } + } + } + + heading = { + type: 'setextHeading', + start: shallow(events[text][1].start), + end: shallow(events[events.length - 1][1].end) + } // Change the paragraph to setext heading text. + + events[text][1].type = 'setextHeadingText' // If we have definitions in the content, we’ll keep on having content, + // but we need move it. + + if (definition) { + events.splice(text, 0, ['enter', heading, context]) + events.splice(definition + 1, 0, ['exit', events[content][1], context]) + events[content][1].end = shallow(events[definition][1].end) + } else { + events[content][1] = heading + } // Add the heading exit at the end. + + events.push(['exit', heading, context]) + return events +} + +function tokenizeSetextUnderline(effects, ok, nok) { + var self = this + var index = self.events.length + var marker + var paragraph // Find an opening. + + while (index--) { + // Skip enter/exit of line ending, line prefix, and content. + // We can now either have a definition or a paragraph. 
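// Illustrative note (a sketch, not text from the micromark source): a setext
// heading is a paragraph followed by an underline made of `=` or `-`
// characters, e.g.
//
//   Heading text
//   ============
//
// This construct tokenizes only that underline; `resolveToSetextUnderline`
// above then retypes the preceding paragraph events as `setextHeadingText`.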
+ if ( + self.events[index][1].type !== 'lineEnding' && + self.events[index][1].type !== 'linePrefix' && + self.events[index][1].type !== 'content' + ) { + paragraph = self.events[index][1].type === 'paragraph' + break + } + } + + return start + + function start(code) { + if (!self.lazy && (self.interrupt || paragraph)) { + effects.enter('setextHeadingLine') + effects.enter('setextHeadingLineSequence') + marker = code + return closingSequence(code) + } + + return nok(code) + } + + function closingSequence(code) { + if (code === marker) { + effects.consume(code) + return closingSequence + } + + effects.exit('setextHeadingLineSequence') + return factorySpace(effects, closingSequenceEnd, 'lineSuffix')(code) + } + + function closingSequenceEnd(code) { + if (code === null || markdownLineEnding(code)) { + effects.exit('setextHeadingLine') + return ok(code) + } + + return nok(code) + } +} + +module.exports = setextUnderline diff --git a/node_modules/micromark/dist/tokenize/thematic-break.js b/node_modules/micromark/dist/tokenize/thematic-break.js new file mode 100644 index 00000000..3abbe554 --- /dev/null +++ b/node_modules/micromark/dist/tokenize/thematic-break.js @@ -0,0 +1,53 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownSpace = require('../character/markdown-space.js') +var factorySpace = require('./factory-space.js') + +var thematicBreak = { + name: 'thematicBreak', + tokenize: tokenizeThematicBreak +} + +function tokenizeThematicBreak(effects, ok, nok) { + var size = 0 + var marker + return start + + function start(code) { + effects.enter('thematicBreak') + marker = code + return atBreak(code) + } + + function atBreak(code) { + if (code === marker) { + effects.enter('thematicBreakSequence') + return sequence(code) + } + + if (markdownSpace(code)) { + return factorySpace(effects, atBreak, 'whitespace')(code) + } + + if (size < 3 || (code !== null && !markdownLineEnding(code))) { + return nok(code) + } + + effects.exit('thematicBreak') + return ok(code) + } + + function sequence(code) { + if (code === marker) { + effects.consume(code) + size++ + return sequence + } + + effects.exit('thematicBreakSequence') + return atBreak(code) + } +} + +module.exports = thematicBreak diff --git a/node_modules/micromark/dist/util/chunked-push.js b/node_modules/micromark/dist/util/chunked-push.js new file mode 100644 index 00000000..77689779 --- /dev/null +++ b/node_modules/micromark/dist/util/chunked-push.js @@ -0,0 +1,14 @@ +'use strict' + +var chunkedSplice = require('./chunked-splice.js') + +function chunkedPush(list, items) { + if (list.length) { + chunkedSplice(list, list.length, 0, items) + return list + } + + return items +} + +module.exports = chunkedPush diff --git a/node_modules/micromark/dist/util/chunked-splice.js b/node_modules/micromark/dist/util/chunked-splice.js new file mode 100644 index 00000000..99525d76 --- /dev/null +++ b/node_modules/micromark/dist/util/chunked-splice.js @@ -0,0 +1,38 @@ +'use strict' + +var splice = require('../constant/splice.js') + +// causes a stack overflow in V8 when trying to insert 100k items for instance. + +function chunkedSplice(list, start, remove, items) { + var end = list.length + var chunkStart = 0 + var parameters // Make start between zero and `end` (included). + + if (start < 0) { + start = -start > end ? 0 : end + start + } else { + start = start > end ? end : start + } + + remove = remove > 0 ? remove : 0 // No need to chunk the items if there’s only a couple (10k) items. 
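// Illustrative sketch of why the chunking below exists (`target` and
// `hugeItems` are hypothetical locals, not names from this package):
// `splice.apply(list, parameters)` passes every inserted item as a separate
// argument, which overflows the call stack once the item count gets large
// (the note above mentions ~100k items), so big inserts are applied in
// slices of at most 10k items:
//
//   var target = []
//   var hugeItems = new Array(250000).fill('x')
//   chunkedSplice(target, target.length, 0, hugeItems)
//   // behaves like `target.splice(target.length, 0, ...hugeItems)` without
//   // ever passing more than 10k arguments to a single `splice` call.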
+ + if (items.length < 10000) { + parameters = Array.from(items) + parameters.unshift(start, remove) + splice.apply(list, parameters) + } else { + // Delete `remove` items starting from `start` + if (remove) splice.apply(list, [start, remove]) // Insert the items in chunks to not cause stack overflows. + + while (chunkStart < items.length) { + parameters = items.slice(chunkStart, chunkStart + 10000) + parameters.unshift(start, 0) + splice.apply(list, parameters) + chunkStart += 10000 + start += 10000 + } + } +} + +module.exports = chunkedSplice diff --git a/node_modules/micromark/dist/util/classify-character.js b/node_modules/micromark/dist/util/classify-character.js new file mode 100644 index 00000000..9d3b21b9 --- /dev/null +++ b/node_modules/micromark/dist/util/classify-character.js @@ -0,0 +1,25 @@ +'use strict' + +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var unicodePunctuation = require('../character/unicode-punctuation.js') +var unicodeWhitespace = require('../character/unicode-whitespace.js') + +// Classify whether a character is unicode whitespace, unicode punctuation, or +// anything else. +// Used for attention (emphasis, strong), whose sequences can open or close +// based on the class of surrounding characters. +function classifyCharacter(code) { + if ( + code === null || + markdownLineEndingOrSpace(code) || + unicodeWhitespace(code) + ) { + return 1 + } + + if (unicodePunctuation(code)) { + return 2 + } +} + +module.exports = classifyCharacter diff --git a/node_modules/micromark/dist/util/combine-extensions.js b/node_modules/micromark/dist/util/combine-extensions.js new file mode 100644 index 00000000..a6f8f347 --- /dev/null +++ b/node_modules/micromark/dist/util/combine-extensions.js @@ -0,0 +1,49 @@ +'use strict' + +var hasOwnProperty = require('../constant/has-own-property.js') +var chunkedSplice = require('./chunked-splice.js') +var miniflat = require('./miniflat.js') + +function combineExtensions(extensions) { + var all = {} + var index = -1 + + while (++index < extensions.length) { + extension(all, extensions[index]) + } + + return all +} + +function extension(all, extension) { + var hook + var left + var right + var code + + for (hook in extension) { + left = hasOwnProperty.call(all, hook) ? all[hook] : (all[hook] = {}) + right = extension[hook] + + for (code in right) { + left[code] = constructs( + miniflat(right[code]), + hasOwnProperty.call(left, code) ? left[code] : [] + ) + } + } +} + +function constructs(list, existing) { + var index = -1 + var before = [] + + while (++index < list.length) { + ;(list[index].add === 'after' ? existing : before).push(list[index]) + } + + chunkedSplice(existing, 0, 0, before) + return existing +} + +module.exports = combineExtensions diff --git a/node_modules/micromark/dist/util/combine-html-extensions.js b/node_modules/micromark/dist/util/combine-html-extensions.js new file mode 100644 index 00000000..c5425878 --- /dev/null +++ b/node_modules/micromark/dist/util/combine-html-extensions.js @@ -0,0 +1,34 @@ +'use strict' + +var hasOwnProperty = require('../constant/has-own-property.js') + +function combineHtmlExtensions(extensions) { + var handlers = {} + var index = -1 + + while (++index < extensions.length) { + extension(handlers, extensions[index]) + } + + return handlers +} + +function extension(handlers, extension) { + var hook + var left + var right + var type + + for (hook in extension) { + left = hasOwnProperty.call(handlers, hook) + ? 
handlers[hook] + : (handlers[hook] = {}) + right = extension[hook] + + for (type in right) { + left[type] = right[type] + } + } +} + +module.exports = combineHtmlExtensions diff --git a/node_modules/micromark/dist/util/create-tokenizer.js b/node_modules/micromark/dist/util/create-tokenizer.js new file mode 100644 index 00000000..9051658c --- /dev/null +++ b/node_modules/micromark/dist/util/create-tokenizer.js @@ -0,0 +1,316 @@ +'use strict' + +var assign = require('../constant/assign.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var chunkedPush = require('./chunked-push.js') +var chunkedSplice = require('./chunked-splice.js') +var miniflat = require('./miniflat.js') +var resolveAll = require('./resolve-all.js') +var serializeChunks = require('./serialize-chunks.js') +var shallow = require('./shallow.js') +var sliceChunks = require('./slice-chunks.js') + +// Create a tokenizer. +// Tokenizers deal with one type of data (e.g., containers, flow, text). +// The parser is the object dealing with it all. +// `initialize` works like other constructs, except that only its `tokenize` +// function is used, in which case it doesn’t receive an `ok` or `nok`. +// `from` can be given to set the point before the first character, although +// when further lines are indented, they must be set with `defineSkip`. +function createTokenizer(parser, initialize, from) { + var point = from + ? shallow(from) + : { + line: 1, + column: 1, + offset: 0 + } + var columnStart = {} + var resolveAllConstructs = [] + var chunks = [] + var stack = [] + + var effects = { + consume: consume, + enter: enter, + exit: exit, + attempt: constructFactory(onsuccessfulconstruct), + check: constructFactory(onsuccessfulcheck), + interrupt: constructFactory(onsuccessfulcheck, { + interrupt: true + }), + lazy: constructFactory(onsuccessfulcheck, { + lazy: true + }) + } // State and tools for resolving and serializing. + + var context = { + previous: null, + events: [], + parser: parser, + sliceStream: sliceStream, + sliceSerialize: sliceSerialize, + now: now, + defineSkip: skip, + write: write + } // The state function. + + var state = initialize.tokenize.call(context, effects) // Track which character we expect to be consumed, to catch bugs. + + if (initialize.resolveAll) { + resolveAllConstructs.push(initialize) + } // Store where we are in the input stream. + + point._index = 0 + point._bufferIndex = -1 + return context + + function write(slice) { + chunks = chunkedPush(chunks, slice) + main() // Exit if we’re not done, resolve might change stuff. + + if (chunks[chunks.length - 1] !== null) { + return [] + } + + addResult(initialize, 0) // Otherwise, resolve, and exit. + + context.events = resolveAll(resolveAllConstructs, context.events, context) + return context.events + } // + // Tools. + // + + function sliceSerialize(token) { + return serializeChunks(sliceStream(token)) + } + + function sliceStream(token) { + return sliceChunks(chunks, token) + } + + function now() { + return shallow(point) + } + + function skip(value) { + columnStart[value.line] = value.column + accountForPotentialSkip() + } // + // State management. + // + // Main loop (note that `_index` and `_bufferIndex` in `point` are modified by + // `consume`). + // Here is where we walk through the chunks, which either include strings of + // several characters, or numerical character codes. + // The reason to do this in a loop instead of a call is so the stack can + // drain. 
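// Assumed example of what `chunks` can hold (inferred from
// `serialize-chunks.js` and `subtokenize.js` elsewhere in this patch, so
// treat the exact values as a sketch): string chunks carry the text of a
// line, negative numbers stand for normalized whitespace codes
// (-5 CR, -4 LF, -3 CRLF, -2 tab), and `null` is the end-of-stream marker
// that `write` tests for above, e.g.
//
//   ['# hi', -4, 'world', null]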
+ + function main() { + var chunkIndex + var chunk + + while (point._index < chunks.length) { + chunk = chunks[point._index] // If we’re in a buffer chunk, loop through it. + + if (typeof chunk === 'string') { + chunkIndex = point._index + + if (point._bufferIndex < 0) { + point._bufferIndex = 0 + } + + while ( + point._index === chunkIndex && + point._bufferIndex < chunk.length + ) { + go(chunk.charCodeAt(point._bufferIndex)) + } + } else { + go(chunk) + } + } + } // Deal with one code. + + function go(code) { + state = state(code) + } // Move a character forward. + + function consume(code) { + if (markdownLineEnding(code)) { + point.line++ + point.column = 1 + point.offset += code === -3 ? 2 : 1 + accountForPotentialSkip() + } else if (code !== -1) { + point.column++ + point.offset++ + } // Not in a string chunk. + + if (point._bufferIndex < 0) { + point._index++ + } else { + point._bufferIndex++ // At end of string chunk. + + if (point._bufferIndex === chunks[point._index].length) { + point._bufferIndex = -1 + point._index++ + } + } // Expose the previous character. + + context.previous = code // Mark as consumed. + } // Start a token. + + function enter(type, fields) { + var token = fields || {} + token.type = type + token.start = now() + context.events.push(['enter', token, context]) + stack.push(token) + return token + } // Stop a token. + + function exit(type) { + var token = stack.pop() + token.end = now() + context.events.push(['exit', token, context]) + return token + } // Use results. + + function onsuccessfulconstruct(construct, info) { + addResult(construct, info.from) + } // Discard results. + + function onsuccessfulcheck(construct, info) { + info.restore() + } // Factory to attempt/check/interrupt. + + function constructFactory(onreturn, fields) { + return hook // Handle either an object mapping codes to constructs, a list of + // constructs, or a single construct. + + function hook(constructs, returnState, bogusState) { + var listOfConstructs + var constructIndex + var currentConstruct + var info + return constructs.tokenize || 'length' in constructs + ? handleListOfConstructs(miniflat(constructs)) + : handleMapOfConstructs + + function handleMapOfConstructs(code) { + if (code in constructs || null in constructs) { + return handleListOfConstructs( + constructs.null + ? /* c8 ignore next */ + miniflat(constructs[code]).concat(miniflat(constructs.null)) + : constructs[code] + )(code) + } + + return bogusState(code) + } + + function handleListOfConstructs(list) { + listOfConstructs = list + constructIndex = 0 + return handleConstruct(list[constructIndex]) + } + + function handleConstruct(construct) { + return start + + function start(code) { + // To do: not nede to store if there is no bogus state, probably? + // Currently doesn’t work because `inspect` in document does a check + // w/o a bogus, which doesn’t make sense. But it does seem to help perf + // by not storing. + info = store() + currentConstruct = construct + + if (!construct.partial) { + context.currentConstruct = construct + } + + if ( + construct.name && + context.parser.constructs.disable.null.indexOf(construct.name) > -1 + ) { + return nok() + } + + return construct.tokenize.call( + fields ? 
assign({}, context, fields) : context, + effects, + ok, + nok + )(code) + } + } + + function ok(code) { + onreturn(currentConstruct, info) + return returnState + } + + function nok(code) { + info.restore() + + if (++constructIndex < listOfConstructs.length) { + return handleConstruct(listOfConstructs[constructIndex]) + } + + return bogusState + } + } + } + + function addResult(construct, from) { + if (construct.resolveAll && resolveAllConstructs.indexOf(construct) < 0) { + resolveAllConstructs.push(construct) + } + + if (construct.resolve) { + chunkedSplice( + context.events, + from, + context.events.length - from, + construct.resolve(context.events.slice(from), context) + ) + } + + if (construct.resolveTo) { + context.events = construct.resolveTo(context.events, context) + } + } + + function store() { + var startPoint = now() + var startPrevious = context.previous + var startCurrentConstruct = context.currentConstruct + var startEventsIndex = context.events.length + var startStack = Array.from(stack) + return { + restore: restore, + from: startEventsIndex + } + + function restore() { + point = startPoint + context.previous = startPrevious + context.currentConstruct = startCurrentConstruct + context.events.length = startEventsIndex + stack = startStack + accountForPotentialSkip() + } + } + + function accountForPotentialSkip() { + if (point.line in columnStart && point.column < 2) { + point.column = columnStart[point.line] + point.offset += columnStart[point.line] - 1 + } + } +} + +module.exports = createTokenizer diff --git a/node_modules/micromark/dist/util/miniflat.js b/node_modules/micromark/dist/util/miniflat.js new file mode 100644 index 00000000..39c5dd4f --- /dev/null +++ b/node_modules/micromark/dist/util/miniflat.js @@ -0,0 +1,11 @@ +'use strict' + +function miniflat(value) { + return value === null || value === undefined + ? [] + : 'length' in value + ? value + : [value] +} + +module.exports = miniflat diff --git a/node_modules/micromark/dist/util/move-point.js b/node_modules/micromark/dist/util/move-point.js new file mode 100644 index 00000000..63c69a2b --- /dev/null +++ b/node_modules/micromark/dist/util/move-point.js @@ -0,0 +1,12 @@ +'use strict' + +// chunks (replacement characters, tabs, or line endings). + +function movePoint(point, offset) { + point.column += offset + point.offset += offset + point._bufferIndex += offset + return point +} + +module.exports = movePoint diff --git a/node_modules/micromark/dist/util/normalize-identifier.js b/node_modules/micromark/dist/util/normalize-identifier.js new file mode 100644 index 00000000..f0632134 --- /dev/null +++ b/node_modules/micromark/dist/util/normalize-identifier.js @@ -0,0 +1,18 @@ +'use strict' + +function normalizeIdentifier(value) { + return ( + value // Collapse Markdown whitespace. + .replace(/[\t\n\r ]+/g, ' ') // Trim. + .replace(/^ | $/g, '') // Some characters are considered “uppercase”, but if their lowercase + // counterpart is uppercased will result in a different uppercase + // character. + // Hence, to get that form, we perform both lower- and uppercase. + // Upper case makes sure keys will not interact with default prototypal + // methods: no object method is uppercase. 
+ .toLowerCase() + .toUpperCase() + ) +} + +module.exports = normalizeIdentifier diff --git a/node_modules/micromark/dist/util/normalize-uri.js b/node_modules/micromark/dist/util/normalize-uri.js new file mode 100644 index 00000000..8a19ace2 --- /dev/null +++ b/node_modules/micromark/dist/util/normalize-uri.js @@ -0,0 +1,62 @@ +'use strict' + +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var fromCharCode = require('../constant/from-char-code.js') + +// encoded sequences. + +function normalizeUri(value) { + var index = -1 + var result = [] + var start = 0 + var skip = 0 + var code + var next + var replace + + while (++index < value.length) { + code = value.charCodeAt(index) // A correct percent encoded value. + + if ( + code === 37 && + asciiAlphanumeric(value.charCodeAt(index + 1)) && + asciiAlphanumeric(value.charCodeAt(index + 2)) + ) { + skip = 2 + } // ASCII. + else if (code < 128) { + if (!/[!#$&-;=?-Z_a-z~]/.test(fromCharCode(code))) { + replace = fromCharCode(code) + } + } // Astral. + else if (code > 55295 && code < 57344) { + next = value.charCodeAt(index + 1) // A correct surrogate pair. + + if (code < 56320 && next > 56319 && next < 57344) { + replace = fromCharCode(code, next) + skip = 1 + } // Lone surrogate. + else { + replace = '\uFFFD' + } + } // Unicode. + else { + replace = fromCharCode(code) + } + + if (replace) { + result.push(value.slice(start, index), encodeURIComponent(replace)) + start = index + skip + 1 + replace = undefined + } + + if (skip) { + index += skip + skip = 0 + } + } + + return result.join('') + value.slice(start) +} + +module.exports = normalizeUri diff --git a/node_modules/micromark/dist/util/prefix-size.js b/node_modules/micromark/dist/util/prefix-size.js new file mode 100644 index 00000000..a560e3e8 --- /dev/null +++ b/node_modules/micromark/dist/util/prefix-size.js @@ -0,0 +1,11 @@ +'use strict' + +var sizeChunks = require('./size-chunks.js') + +function prefixSize(events, type) { + var tail = events[events.length - 1] + if (!tail || tail[1].type !== type) return 0 + return sizeChunks(tail[2].sliceStream(tail[1])) +} + +module.exports = prefixSize diff --git a/node_modules/micromark/dist/util/regex-check.js b/node_modules/micromark/dist/util/regex-check.js new file mode 100644 index 00000000..b879f444 --- /dev/null +++ b/node_modules/micromark/dist/util/regex-check.js @@ -0,0 +1,13 @@ +'use strict' + +var fromCharCode = require('../constant/from-char-code.js') + +function regexCheck(regex) { + return check + + function check(code) { + return regex.test(fromCharCode(code)) + } +} + +module.exports = regexCheck diff --git a/node_modules/micromark/dist/util/resolve-all.js b/node_modules/micromark/dist/util/resolve-all.js new file mode 100644 index 00000000..3e8d76b4 --- /dev/null +++ b/node_modules/micromark/dist/util/resolve-all.js @@ -0,0 +1,20 @@ +'use strict' + +function resolveAll(constructs, events, context) { + var called = [] + var index = -1 + var resolve + + while (++index < constructs.length) { + resolve = constructs[index].resolveAll + + if (resolve && called.indexOf(resolve) < 0) { + events = resolve(events, context) + called.push(resolve) + } + } + + return events +} + +module.exports = resolveAll diff --git a/node_modules/micromark/dist/util/safe-from-int.js b/node_modules/micromark/dist/util/safe-from-int.js new file mode 100644 index 00000000..08dcac94 --- /dev/null +++ b/node_modules/micromark/dist/util/safe-from-int.js @@ -0,0 +1,26 @@ +'use strict' + +var fromCharCode = 
require('../constant/from-char-code.js') + +function safeFromInt(value, base) { + var code = parseInt(value, base) + + if ( + // C0 except for HT, LF, FF, CR, space + code < 9 || + code === 11 || + (code > 13 && code < 32) || // Control character (DEL) of the basic block and C1 controls. + (code > 126 && code < 160) || // Lone high surrogates and low surrogates. + (code > 55295 && code < 57344) || // Noncharacters. + (code > 64975 && code < 65008) || + (code & 65535) === 65535 || + (code & 65535) === 65534 || // Out of range + code > 1114111 + ) { + return '\uFFFD' + } + + return fromCharCode(code) +} + +module.exports = safeFromInt diff --git a/node_modules/micromark/dist/util/serialize-chunks.js b/node_modules/micromark/dist/util/serialize-chunks.js new file mode 100644 index 00000000..48d9e24f --- /dev/null +++ b/node_modules/micromark/dist/util/serialize-chunks.js @@ -0,0 +1,40 @@ +'use strict' + +var fromCharCode = require('../constant/from-char-code.js') + +function serializeChunks(chunks) { + var index = -1 + var result = [] + var chunk + var value + var atTab + + while (++index < chunks.length) { + chunk = chunks[index] + + if (typeof chunk === 'string') { + value = chunk + } else if (chunk === -5) { + value = '\r' + } else if (chunk === -4) { + value = '\n' + } else if (chunk === -3) { + value = '\r' + '\n' + } else if (chunk === -2) { + value = '\t' + } else if (chunk === -1) { + if (atTab) continue + value = ' ' + } else { + // Currently only replacement character. + value = fromCharCode(chunk) + } + + atTab = chunk === -2 + result.push(value) + } + + return result.join('') +} + +module.exports = serializeChunks diff --git a/node_modules/micromark/dist/util/shallow.js b/node_modules/micromark/dist/util/shallow.js new file mode 100644 index 00000000..f980ab99 --- /dev/null +++ b/node_modules/micromark/dist/util/shallow.js @@ -0,0 +1,9 @@ +'use strict' + +var assign = require('../constant/assign.js') + +function shallow(object) { + return assign({}, object) +} + +module.exports = shallow diff --git a/node_modules/micromark/dist/util/size-chunks.js b/node_modules/micromark/dist/util/size-chunks.js new file mode 100644 index 00000000..85bacf0d --- /dev/null +++ b/node_modules/micromark/dist/util/size-chunks.js @@ -0,0 +1,16 @@ +'use strict' + +// Counts tabs based on their expanded size, and CR+LF as one character. + +function sizeChunks(chunks) { + var index = -1 + var size = 0 + + while (++index < chunks.length) { + size += typeof chunks[index] === 'string' ? 
chunks[index].length : 1 + } + + return size +} + +module.exports = sizeChunks diff --git a/node_modules/micromark/dist/util/slice-chunks.js b/node_modules/micromark/dist/util/slice-chunks.js new file mode 100644 index 00000000..a1ad9289 --- /dev/null +++ b/node_modules/micromark/dist/util/slice-chunks.js @@ -0,0 +1,27 @@ +'use strict' + +function sliceChunks(chunks, token) { + var startIndex = token.start._index + var startBufferIndex = token.start._bufferIndex + var endIndex = token.end._index + var endBufferIndex = token.end._bufferIndex + var view + + if (startIndex === endIndex) { + view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)] + } else { + view = chunks.slice(startIndex, endIndex) + + if (startBufferIndex > -1) { + view[0] = view[0].slice(startBufferIndex) + } + + if (endBufferIndex > 0) { + view.push(chunks[endIndex].slice(0, endBufferIndex)) + } + } + + return view +} + +module.exports = sliceChunks diff --git a/node_modules/micromark/dist/util/subtokenize.js b/node_modules/micromark/dist/util/subtokenize.js new file mode 100644 index 00000000..dd960c6e --- /dev/null +++ b/node_modules/micromark/dist/util/subtokenize.js @@ -0,0 +1,199 @@ +'use strict' + +var assign = require('../constant/assign.js') +var chunkedSplice = require('./chunked-splice.js') +var shallow = require('./shallow.js') + +function subtokenize(events) { + var jumps = {} + var index = -1 + var event + var lineIndex + var otherIndex + var otherEvent + var parameters + var subevents + var more + + while (++index < events.length) { + while (index in jumps) { + index = jumps[index] + } + + event = events[index] // Add a hook for the GFM tasklist extension, which needs to know if text + // is in the first content of a list item. + + if ( + index && + event[1].type === 'chunkFlow' && + events[index - 1][1].type === 'listItemPrefix' + ) { + subevents = event[1]._tokenizer.events + otherIndex = 0 + + if ( + otherIndex < subevents.length && + subevents[otherIndex][1].type === 'lineEndingBlank' + ) { + otherIndex += 2 + } + + if ( + otherIndex < subevents.length && + subevents[otherIndex][1].type === 'content' + ) { + while (++otherIndex < subevents.length) { + if (subevents[otherIndex][1].type === 'content') { + break + } + + if (subevents[otherIndex][1].type === 'chunkText') { + subevents[otherIndex][1].isInFirstContentOfListItem = true + otherIndex++ + } + } + } + } // Enter. + + if (event[0] === 'enter') { + if (event[1].contentType) { + assign(jumps, subcontent(events, index)) + index = jumps[index] + more = true + } + } // Exit. + else if (event[1]._container || event[1]._movePreviousLineEndings) { + otherIndex = index + lineIndex = undefined + + while (otherIndex--) { + otherEvent = events[otherIndex] + + if ( + otherEvent[1].type === 'lineEnding' || + otherEvent[1].type === 'lineEndingBlank' + ) { + if (otherEvent[0] === 'enter') { + if (lineIndex) { + events[lineIndex][1].type = 'lineEndingBlank' + } + + otherEvent[1].type = 'lineEnding' + lineIndex = otherIndex + } + } else { + break + } + } + + if (lineIndex) { + // Fix position. + event[1].end = shallow(events[lineIndex][1].start) // Switch container exit w/ line endings. 
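The dist utilities introduced just above (normalize-uri, safe-from-int, serialize-chunks, size-chunks) are small enough to exercise directly. A minimal sketch, assuming the files can be required by the package paths this patch adds (they are plain CommonJS modules):

var normalizeUri = require('micromark/dist/util/normalize-uri.js')
var safeFromInt = require('micromark/dist/util/safe-from-int.js')
var serializeChunks = require('micromark/dist/util/serialize-chunks.js')
var sizeChunks = require('micromark/dist/util/size-chunks.js')

// Percent-encode unsafe URL characters, but keep already-encoded sequences.
console.log(normalizeUri('foo bar')) // => 'foo%20bar'
console.log(normalizeUri('%20'))     // => '%20' (left alone)

// Numeric character references: valid code points come back as characters;
// control characters, lone surrogates, and other invalid values become U+FFFD.
console.log(safeFromInt('41', 16)) // => 'A'
console.log(safeFromInt('0', 10))  // => '\uFFFD'

// Chunks are strings plus negative sentinel codes (-5 CR, -4 LF, -3 CR+LF,
// -2 tab, -1 virtual space); each sentinel counts as one character.
console.log(serializeChunks(['a', -3, 'b'])) // => 'a\r\nb'
console.log(sizeChunks(['a', -3, 'b']))      // => 3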
+ + parameters = events.slice(lineIndex, index) + parameters.unshift(event) + chunkedSplice(events, lineIndex, index - lineIndex + 1, parameters) + } + } + } + + return !more +} + +function subcontent(events, eventIndex) { + var token = events[eventIndex][1] + var context = events[eventIndex][2] + var startPosition = eventIndex - 1 + var startPositions = [] + var tokenizer = + token._tokenizer || context.parser[token.contentType](token.start) + var childEvents = tokenizer.events + var jumps = [] + var gaps = {} + var stream + var previous + var index + var entered + var end + var adjust // Loop forward through the linked tokens to pass them in order to the + // subtokenizer. + + while (token) { + // Find the position of the event for this token. + while (events[++startPosition][1] !== token) { + // Empty. + } + + startPositions.push(startPosition) + + if (!token._tokenizer) { + stream = context.sliceStream(token) + + if (!token.next) { + stream.push(null) + } + + if (previous) { + tokenizer.defineSkip(token.start) + } + + if (token.isInFirstContentOfListItem) { + tokenizer._gfmTasklistFirstContentOfListItem = true + } + + tokenizer.write(stream) + + if (token.isInFirstContentOfListItem) { + tokenizer._gfmTasklistFirstContentOfListItem = undefined + } + } // Unravel the next token. + + previous = token + token = token.next + } // Now, loop back through all events (and linked tokens), to figure out which + // parts belong where. + + token = previous + index = childEvents.length + + while (index--) { + // Make sure we’ve at least seen something (final eol is part of the last + // token). + if (childEvents[index][0] === 'enter') { + entered = true + } else if ( + // Find a void token that includes a break. + entered && + childEvents[index][1].type === childEvents[index - 1][1].type && + childEvents[index][1].start.line !== childEvents[index][1].end.line + ) { + add(childEvents.slice(index + 1, end)) + // Help GC. + token._tokenizer = token.next = undefined + token = token.previous + end = index + 1 + } + } + + // Help GC. 
+ tokenizer.events = token._tokenizer = token.next = undefined // Do head: + + add(childEvents.slice(0, end)) + index = -1 + adjust = 0 + + while (++index < jumps.length) { + gaps[adjust + jumps[index][0]] = adjust + jumps[index][1] + adjust += jumps[index][1] - jumps[index][0] - 1 + } + + return gaps + + function add(slice) { + var start = startPositions.pop() + jumps.unshift([start, start + slice.length - 1]) + chunkedSplice(events, start, 2, slice) + } +} + +module.exports = subtokenize diff --git a/node_modules/micromark/index.d.ts b/node_modules/micromark/index.d.ts new file mode 100644 index 00000000..9b9cbaef --- /dev/null +++ b/node_modules/micromark/index.d.ts @@ -0,0 +1,5 @@ +// Minimum TypeScript Version: 3.0 + +import buffer from './buffer' + +export default buffer diff --git a/node_modules/micromark/index.js b/node_modules/micromark/index.js new file mode 100644 index 00000000..bb7c67d9 --- /dev/null +++ b/node_modules/micromark/index.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('./buffer.js') diff --git a/node_modules/micromark/index.mjs b/node_modules/micromark/index.mjs new file mode 100644 index 00000000..2e841cc1 --- /dev/null +++ b/node_modules/micromark/index.mjs @@ -0,0 +1 @@ +export {default} from './buffer.mjs' diff --git a/node_modules/micromark/lib/character/ascii-alpha.js b/node_modules/micromark/lib/character/ascii-alpha.js new file mode 100644 index 00000000..4e5b20d2 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-alpha.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiAlpha = regexCheck(/[A-Za-z]/) + +module.exports = asciiAlpha diff --git a/node_modules/micromark/lib/character/ascii-alpha.mjs b/node_modules/micromark/lib/character/ascii-alpha.mjs new file mode 100644 index 00000000..f6f3aaba --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-alpha.mjs @@ -0,0 +1,3 @@ +import check from '../util/regex-check.mjs' + +export default check(/[A-Za-z]/) diff --git a/node_modules/micromark/lib/character/ascii-alphanumeric.js b/node_modules/micromark/lib/character/ascii-alphanumeric.js new file mode 100644 index 00000000..4ab36027 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-alphanumeric.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/) + +module.exports = asciiAlphanumeric diff --git a/node_modules/micromark/lib/character/ascii-alphanumeric.mjs b/node_modules/micromark/lib/character/ascii-alphanumeric.mjs new file mode 100644 index 00000000..efed7145 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-alphanumeric.mjs @@ -0,0 +1,3 @@ +import check from '../util/regex-check.mjs' + +export default check(/[\dA-Za-z]/) diff --git a/node_modules/micromark/lib/character/ascii-atext.js b/node_modules/micromark/lib/character/ascii-atext.js new file mode 100644 index 00000000..8962f996 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-atext.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/) + +module.exports = asciiAtext diff --git a/node_modules/micromark/lib/character/ascii-atext.mjs b/node_modules/micromark/lib/character/ascii-atext.mjs new file mode 100644 index 00000000..56b84c42 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-atext.mjs @@ -0,0 +1,3 @@ +import check from '../util/regex-check.mjs' + +export default check(/[#-'*+\--9=?A-Z^-~]/) diff 
--git a/node_modules/micromark/lib/character/ascii-control.js b/node_modules/micromark/lib/character/ascii-control.js new file mode 100644 index 00000000..c134a613 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-control.js @@ -0,0 +1,14 @@ +'use strict' + +var codes = require('./codes.js') + +// Note: EOF is seen as ASCII control here, because `null < 32 == true`. +function asciiControl(code) { + return ( + // Special whitespace codes (which have negative values), C0 and Control + // character DEL + code < codes.space || code === codes.del + ) +} + +module.exports = asciiControl diff --git a/node_modules/micromark/lib/character/ascii-control.mjs b/node_modules/micromark/lib/character/ascii-control.mjs new file mode 100644 index 00000000..08241919 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-control.mjs @@ -0,0 +1,12 @@ +export default asciiControl + +import codes from './codes.mjs' + +// Note: EOF is seen as ASCII control here, because `null < 32 == true`. +function asciiControl(code) { + return ( + // Special whitespace codes (which have negative values), C0 and Control + // character DEL + code < codes.space || code === codes.del + ) +} diff --git a/node_modules/micromark/lib/character/ascii-digit.js b/node_modules/micromark/lib/character/ascii-digit.js new file mode 100644 index 00000000..da614c4e --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-digit.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiDigit = regexCheck(/\d/) + +module.exports = asciiDigit diff --git a/node_modules/micromark/lib/character/ascii-digit.mjs b/node_modules/micromark/lib/character/ascii-digit.mjs new file mode 100644 index 00000000..ec3b6e11 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-digit.mjs @@ -0,0 +1,3 @@ +import check from '../util/regex-check.mjs' + +export default check(/\d/) diff --git a/node_modules/micromark/lib/character/ascii-hex-digit.js b/node_modules/micromark/lib/character/ascii-hex-digit.js new file mode 100644 index 00000000..a0e7af43 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-hex-digit.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiHexDigit = regexCheck(/[\dA-Fa-f]/) + +module.exports = asciiHexDigit diff --git a/node_modules/micromark/lib/character/ascii-hex-digit.mjs b/node_modules/micromark/lib/character/ascii-hex-digit.mjs new file mode 100644 index 00000000..3eabedbf --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-hex-digit.mjs @@ -0,0 +1,3 @@ +import check from '../util/regex-check.mjs' + +export default check(/[\dA-Fa-f]/) diff --git a/node_modules/micromark/lib/character/ascii-punctuation.js b/node_modules/micromark/lib/character/ascii-punctuation.js new file mode 100644 index 00000000..596b45a5 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-punctuation.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/) + +module.exports = asciiPunctuation diff --git a/node_modules/micromark/lib/character/ascii-punctuation.mjs b/node_modules/micromark/lib/character/ascii-punctuation.mjs new file mode 100644 index 00000000..d8308f11 --- /dev/null +++ b/node_modules/micromark/lib/character/ascii-punctuation.mjs @@ -0,0 +1,3 @@ +import check from '../util/regex-check.mjs' + +export default check(/[!-/:-@[-`{-~]/) diff --git a/node_modules/micromark/lib/character/codes.d.ts 
b/node_modules/micromark/lib/character/codes.d.ts new file mode 100644 index 00000000..6908558c --- /dev/null +++ b/node_modules/micromark/lib/character/codes.d.ts @@ -0,0 +1,148 @@ +// This module is generated by `script/`. + +export type Code = null | number + +// @for-script: REMOVE_ALL_THING_BELOW + +export interface Codes { + carriageReturn: -5 + lineFeed: -4 + carriageReturnLineFeed: -3 + horizontalTab: -2 + virtualSpace: -1 + eof: null + nul: 0 + soh: 1 + stx: 2 + etx: 3 + eot: 4 + enq: 5 + ack: 6 + bel: 7 + bs: 8 + ht: 9 + lf: 10 + vt: 11 + ff: 12 + cr: 13 + so: 14 + si: 15 + dle: 16 + dc1: 17 + dc2: 18 + dc3: 19 + dc4: 20 + nak: 21 + syn: 22 + etb: 23 + can: 24 + em: 25 + sub: 26 + esc: 27 + fs: 28 + gs: 29 + rs: 30 + us: 31 + space: 32 + exclamationMark: 33 + quotationMark: 34 + numberSign: 35 + dollarSign: 36 + percentSign: 37 + ampersand: 38 + apostrophe: 39 + leftParenthesis: 40 + rightParenthesis: 41 + asterisk: 42 + plusSign: 43 + comma: 44 + dash: 45 + dot: 46 + slash: 47 + digit0: 48 + digit1: 49 + digit2: 50 + digit3: 51 + digit4: 52 + digit5: 53 + digit6: 54 + digit7: 55 + digit8: 56 + digit9: 57 + colon: 58 + semicolon: 59 + lessThan: 60 + equalsTo: 61 + greaterThan: 62 + questionMark: 63 + atSign: 64 + uppercaseA: 65 + uppercaseB: 66 + uppercaseC: 67 + uppercaseD: 68 + uppercaseE: 69 + uppercaseF: 70 + uppercaseG: 71 + uppercaseH: 72 + uppercaseI: 73 + uppercaseJ: 74 + uppercaseK: 75 + uppercaseL: 76 + uppercaseM: 77 + uppercaseN: 78 + uppercaseO: 79 + uppercaseP: 80 + uppercaseQ: 81 + uppercaseR: 82 + uppercaseS: 83 + uppercaseT: 84 + uppercaseU: 85 + uppercaseV: 86 + uppercaseW: 87 + uppercaseX: 88 + uppercaseY: 89 + uppercaseZ: 90 + leftSquareBracket: 91 + backslash: 92 + rightSquareBracket: 93 + caret: 94 + underscore: 95 + graveAccent: 96 + lowercaseA: 97 + lowercaseB: 98 + lowercaseC: 99 + lowercaseD: 100 + lowercaseE: 101 + lowercaseF: 102 + lowercaseG: 103 + lowercaseH: 104 + lowercaseI: 105 + lowercaseJ: 106 + lowercaseK: 107 + lowercaseL: 108 + lowercaseM: 109 + lowercaseN: 110 + lowercaseO: 111 + lowercaseP: 112 + lowercaseQ: 113 + lowercaseR: 114 + lowercaseS: 115 + lowercaseT: 116 + lowercaseU: 117 + lowercaseV: 118 + lowercaseW: 119 + lowercaseX: 120 + lowercaseY: 121 + lowercaseZ: 122 + leftCurlyBrace: 123 + verticalBar: 124 + rightCurlyBrace: 125 + tilde: 126 + del: 127 + byteOrderMarker: 65279 + replacementCharacter: 65533 +} + +declare const value: Codes + +export default value diff --git a/node_modules/micromark/lib/character/codes.js b/node_modules/micromark/lib/character/codes.js new file mode 100644 index 00000000..46ab8180 --- /dev/null +++ b/node_modules/micromark/lib/character/codes.js @@ -0,0 +1,158 @@ +'use strict' + +// This module is compiled away! +// +// micromark works based on character codes. +// This module contains constants for the ASCII block and the replacement +// character. +// A couple of them are handled in a special way, such as the line endings +// (CR, LF, and CR+LF, commonly known as end-of-line: EOLs), the tab (horizontal +// tab) and its expansion based on what column it’s at (virtual space), +// and the end-of-file (eof) character. +// As values are preprocessed before handling them, the actual characters LF, +// CR, HT, and NUL (which is present as the replacement character), are +// guaranteed to not exist. +// +// Unicode basic latin block. 
+var codes = { + carriageReturn: -5, + lineFeed: -4, + carriageReturnLineFeed: -3, + horizontalTab: -2, + virtualSpace: -1, + eof: null, + nul: 0, + soh: 1, + stx: 2, + etx: 3, + eot: 4, + enq: 5, + ack: 6, + bel: 7, + bs: 8, + ht: 9, // `\t` + lf: 10, // `\n` + vt: 11, // `\v` + ff: 12, // `\f` + cr: 13, // `\r` + so: 14, + si: 15, + dle: 16, + dc1: 17, + dc2: 18, + dc3: 19, + dc4: 20, + nak: 21, + syn: 22, + etb: 23, + can: 24, + em: 25, + sub: 26, + esc: 27, + fs: 28, + gs: 29, + rs: 30, + us: 31, + space: 32, + exclamationMark: 33, // `!` + quotationMark: 34, // `"` + numberSign: 35, // `#` + dollarSign: 36, // `$` + percentSign: 37, // `%` + ampersand: 38, // `&` + apostrophe: 39, // `'` + leftParenthesis: 40, // `(` + rightParenthesis: 41, // `)` + asterisk: 42, // `*` + plusSign: 43, // `+` + comma: 44, // `,` + dash: 45, // `-` + dot: 46, // `.` + slash: 47, // `/` + digit0: 48, // `0` + digit1: 49, // `1` + digit2: 50, // `2` + digit3: 51, // `3` + digit4: 52, // `4` + digit5: 53, // `5` + digit6: 54, // `6` + digit7: 55, // `7` + digit8: 56, // `8` + digit9: 57, // `9` + colon: 58, // `:` + semicolon: 59, // `;` + lessThan: 60, // `<` + equalsTo: 61, // `=` + greaterThan: 62, // `>` + questionMark: 63, // `?` + atSign: 64, // `@` + uppercaseA: 65, // `A` + uppercaseB: 66, // `B` + uppercaseC: 67, // `C` + uppercaseD: 68, // `D` + uppercaseE: 69, // `E` + uppercaseF: 70, // `F` + uppercaseG: 71, // `G` + uppercaseH: 72, // `H` + uppercaseI: 73, // `I` + uppercaseJ: 74, // `J` + uppercaseK: 75, // `K` + uppercaseL: 76, // `L` + uppercaseM: 77, // `M` + uppercaseN: 78, // `N` + uppercaseO: 79, // `O` + uppercaseP: 80, // `P` + uppercaseQ: 81, // `Q` + uppercaseR: 82, // `R` + uppercaseS: 83, // `S` + uppercaseT: 84, // `T` + uppercaseU: 85, // `U` + uppercaseV: 86, // `V` + uppercaseW: 87, // `W` + uppercaseX: 88, // `X` + uppercaseY: 89, // `Y` + uppercaseZ: 90, // `Z` + leftSquareBracket: 91, // `[` + backslash: 92, // `\` + rightSquareBracket: 93, // `]` + caret: 94, // `^` + underscore: 95, // `_` + graveAccent: 96, // `` ` `` + lowercaseA: 97, // `a` + lowercaseB: 98, // `b` + lowercaseC: 99, // `c` + lowercaseD: 100, // `d` + lowercaseE: 101, // `e` + lowercaseF: 102, // `f` + lowercaseG: 103, // `g` + lowercaseH: 104, // `h` + lowercaseI: 105, // `i` + lowercaseJ: 106, // `j` + lowercaseK: 107, // `k` + lowercaseL: 108, // `l` + lowercaseM: 109, // `m` + lowercaseN: 110, // `n` + lowercaseO: 111, // `o` + lowercaseP: 112, // `p` + lowercaseQ: 113, // `q` + lowercaseR: 114, // `r` + lowercaseS: 115, // `s` + lowercaseT: 116, // `t` + lowercaseU: 117, // `u` + lowercaseV: 118, // `v` + lowercaseW: 119, // `w` + lowercaseX: 120, // `x` + lowercaseY: 121, // `y` + lowercaseZ: 122, // `z` + leftCurlyBrace: 123, // `{` + verticalBar: 124, // `|` + rightCurlyBrace: 125, // `}` + tilde: 126, // `~` + del: 127, + // Unicode Specials block. + byteOrderMarker: 65279, + // Unicode Specials block. + replacementCharacter: 65533 // `�` +} + +module.exports = codes diff --git a/node_modules/micromark/lib/character/codes.mjs b/node_modules/micromark/lib/character/codes.mjs new file mode 100644 index 00000000..7503f472 --- /dev/null +++ b/node_modules/micromark/lib/character/codes.mjs @@ -0,0 +1,154 @@ +// This module is compiled away! +// +// micromark works based on character codes. +// This module contains constants for the ASCII block and the replacement +// character. 
+// A couple of them are handled in a special way, such as the line endings +// (CR, LF, and CR+LF, commonly known as end-of-line: EOLs), the tab (horizontal +// tab) and its expansion based on what column it’s at (virtual space), +// and the end-of-file (eof) character. +// As values are preprocessed before handling them, the actual characters LF, +// CR, HT, and NUL (which is present as the replacement character), are +// guaranteed to not exist. +// +// Unicode basic latin block. +export default { + carriageReturn: -5, + lineFeed: -4, + carriageReturnLineFeed: -3, + horizontalTab: -2, + virtualSpace: -1, + eof: null, + nul: 0, + soh: 1, + stx: 2, + etx: 3, + eot: 4, + enq: 5, + ack: 6, + bel: 7, + bs: 8, + ht: 9, // `\t` + lf: 10, // `\n` + vt: 11, // `\v` + ff: 12, // `\f` + cr: 13, // `\r` + so: 14, + si: 15, + dle: 16, + dc1: 17, + dc2: 18, + dc3: 19, + dc4: 20, + nak: 21, + syn: 22, + etb: 23, + can: 24, + em: 25, + sub: 26, + esc: 27, + fs: 28, + gs: 29, + rs: 30, + us: 31, + space: 32, + exclamationMark: 33, // `!` + quotationMark: 34, // `"` + numberSign: 35, // `#` + dollarSign: 36, // `$` + percentSign: 37, // `%` + ampersand: 38, // `&` + apostrophe: 39, // `'` + leftParenthesis: 40, // `(` + rightParenthesis: 41, // `)` + asterisk: 42, // `*` + plusSign: 43, // `+` + comma: 44, // `,` + dash: 45, // `-` + dot: 46, // `.` + slash: 47, // `/` + digit0: 48, // `0` + digit1: 49, // `1` + digit2: 50, // `2` + digit3: 51, // `3` + digit4: 52, // `4` + digit5: 53, // `5` + digit6: 54, // `6` + digit7: 55, // `7` + digit8: 56, // `8` + digit9: 57, // `9` + colon: 58, // `:` + semicolon: 59, // `;` + lessThan: 60, // `<` + equalsTo: 61, // `=` + greaterThan: 62, // `>` + questionMark: 63, // `?` + atSign: 64, // `@` + uppercaseA: 65, // `A` + uppercaseB: 66, // `B` + uppercaseC: 67, // `C` + uppercaseD: 68, // `D` + uppercaseE: 69, // `E` + uppercaseF: 70, // `F` + uppercaseG: 71, // `G` + uppercaseH: 72, // `H` + uppercaseI: 73, // `I` + uppercaseJ: 74, // `J` + uppercaseK: 75, // `K` + uppercaseL: 76, // `L` + uppercaseM: 77, // `M` + uppercaseN: 78, // `N` + uppercaseO: 79, // `O` + uppercaseP: 80, // `P` + uppercaseQ: 81, // `Q` + uppercaseR: 82, // `R` + uppercaseS: 83, // `S` + uppercaseT: 84, // `T` + uppercaseU: 85, // `U` + uppercaseV: 86, // `V` + uppercaseW: 87, // `W` + uppercaseX: 88, // `X` + uppercaseY: 89, // `Y` + uppercaseZ: 90, // `Z` + leftSquareBracket: 91, // `[` + backslash: 92, // `\` + rightSquareBracket: 93, // `]` + caret: 94, // `^` + underscore: 95, // `_` + graveAccent: 96, // `` ` `` + lowercaseA: 97, // `a` + lowercaseB: 98, // `b` + lowercaseC: 99, // `c` + lowercaseD: 100, // `d` + lowercaseE: 101, // `e` + lowercaseF: 102, // `f` + lowercaseG: 103, // `g` + lowercaseH: 104, // `h` + lowercaseI: 105, // `i` + lowercaseJ: 106, // `j` + lowercaseK: 107, // `k` + lowercaseL: 108, // `l` + lowercaseM: 109, // `m` + lowercaseN: 110, // `n` + lowercaseO: 111, // `o` + lowercaseP: 112, // `p` + lowercaseQ: 113, // `q` + lowercaseR: 114, // `r` + lowercaseS: 115, // `s` + lowercaseT: 116, // `t` + lowercaseU: 117, // `u` + lowercaseV: 118, // `v` + lowercaseW: 119, // `w` + lowercaseX: 120, // `x` + lowercaseY: 121, // `y` + lowercaseZ: 122, // `z` + leftCurlyBrace: 123, // `{` + verticalBar: 124, // `|` + rightCurlyBrace: 125, // `}` + tilde: 126, // `~` + del: 127, + // Unicode Specials block. + byteOrderMarker: 65279, + // Unicode Specials block. 
+ replacementCharacter: 65533 // `�` +} diff --git a/node_modules/micromark/lib/character/markdown-line-ending-or-space.js b/node_modules/micromark/lib/character/markdown-line-ending-or-space.js new file mode 100644 index 00000000..2b6ffb9b --- /dev/null +++ b/node_modules/micromark/lib/character/markdown-line-ending-or-space.js @@ -0,0 +1,9 @@ +'use strict' + +var codes = require('./codes.js') + +function markdownLineEndingOrSpace(code) { + return code < codes.nul || code === codes.space +} + +module.exports = markdownLineEndingOrSpace diff --git a/node_modules/micromark/lib/character/markdown-line-ending-or-space.mjs b/node_modules/micromark/lib/character/markdown-line-ending-or-space.mjs new file mode 100644 index 00000000..6e27e037 --- /dev/null +++ b/node_modules/micromark/lib/character/markdown-line-ending-or-space.mjs @@ -0,0 +1,7 @@ +export default markdownLineEndingOrSpace + +import codes from './codes.mjs' + +function markdownLineEndingOrSpace(code) { + return code < codes.nul || code === codes.space +} diff --git a/node_modules/micromark/lib/character/markdown-line-ending.js b/node_modules/micromark/lib/character/markdown-line-ending.js new file mode 100644 index 00000000..05032eef --- /dev/null +++ b/node_modules/micromark/lib/character/markdown-line-ending.js @@ -0,0 +1,9 @@ +'use strict' + +var codes = require('./codes.js') + +function markdownLineEnding(code) { + return code < codes.horizontalTab +} + +module.exports = markdownLineEnding diff --git a/node_modules/micromark/lib/character/markdown-line-ending.mjs b/node_modules/micromark/lib/character/markdown-line-ending.mjs new file mode 100644 index 00000000..63c1b71c --- /dev/null +++ b/node_modules/micromark/lib/character/markdown-line-ending.mjs @@ -0,0 +1,7 @@ +export default markdownLineEnding + +import codes from './codes.mjs' + +function markdownLineEnding(code) { + return code < codes.horizontalTab +} diff --git a/node_modules/micromark/lib/character/markdown-space.js b/node_modules/micromark/lib/character/markdown-space.js new file mode 100644 index 00000000..6273782f --- /dev/null +++ b/node_modules/micromark/lib/character/markdown-space.js @@ -0,0 +1,13 @@ +'use strict' + +var codes = require('./codes.js') + +function markdownSpace(code) { + return ( + code === codes.horizontalTab || + code === codes.virtualSpace || + code === codes.space + ) +} + +module.exports = markdownSpace diff --git a/node_modules/micromark/lib/character/markdown-space.mjs b/node_modules/micromark/lib/character/markdown-space.mjs new file mode 100644 index 00000000..03b72a18 --- /dev/null +++ b/node_modules/micromark/lib/character/markdown-space.mjs @@ -0,0 +1,11 @@ +export default markdownSpace + +import codes from './codes.mjs' + +function markdownSpace(code) { + return ( + code === codes.horizontalTab || + code === codes.virtualSpace || + code === codes.space + ) +} diff --git a/node_modules/micromark/lib/character/unicode-punctuation.js b/node_modules/micromark/lib/character/unicode-punctuation.js new file mode 100644 index 00000000..ae28b173 --- /dev/null +++ b/node_modules/micromark/lib/character/unicode-punctuation.js @@ -0,0 +1,10 @@ +'use strict' + +var unicodePunctuationRegex = require('../constant/unicode-punctuation-regex.js') +var regexCheck = require('../util/regex-check.js') + +// Size note: removing ASCII from the regex and using `ascii-punctuation` here +// In fact adds to the bundle size. 
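The character predicates above operate on the sentinel codes defined in codes.js rather than on strings. A small check, again assuming the lib files added in this patch can be required directly:

var codes = require('micromark/lib/character/codes.js')
var markdownSpace = require('micromark/lib/character/markdown-space.js')
var markdownLineEnding = require('micromark/lib/character/markdown-line-ending.js')
var asciiControl = require('micromark/lib/character/ascii-control.js')

console.log(markdownSpace(codes.horizontalTab))               // => true (-2)
console.log(markdownSpace(codes.space))                       // => true (32)
console.log(markdownLineEnding(codes.carriageReturnLineFeed)) // => true (-3 < -2)
console.log(markdownLineEnding(codes.space))                  // => false
console.log(asciiControl(codes.eof))                          // => true (null < 32)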
+var unicodePunctuation = regexCheck(unicodePunctuationRegex) + +module.exports = unicodePunctuation diff --git a/node_modules/micromark/lib/character/unicode-punctuation.mjs b/node_modules/micromark/lib/character/unicode-punctuation.mjs new file mode 100644 index 00000000..037f7f9b --- /dev/null +++ b/node_modules/micromark/lib/character/unicode-punctuation.mjs @@ -0,0 +1,6 @@ +import unicodePunctuation from '../constant/unicode-punctuation-regex.mjs' +import check from '../util/regex-check.mjs' + +// Size note: removing ASCII from the regex and using `ascii-punctuation` here +// In fact adds to the bundle size. +export default check(unicodePunctuation) diff --git a/node_modules/micromark/lib/character/unicode-whitespace.js b/node_modules/micromark/lib/character/unicode-whitespace.js new file mode 100644 index 00000000..b09537ea --- /dev/null +++ b/node_modules/micromark/lib/character/unicode-whitespace.js @@ -0,0 +1,7 @@ +'use strict' + +var regexCheck = require('../util/regex-check.js') + +var unicodeWhitespace = regexCheck(/\s/) + +module.exports = unicodeWhitespace diff --git a/node_modules/micromark/lib/character/unicode-whitespace.mjs b/node_modules/micromark/lib/character/unicode-whitespace.mjs new file mode 100644 index 00000000..5a7a530a --- /dev/null +++ b/node_modules/micromark/lib/character/unicode-whitespace.mjs @@ -0,0 +1,3 @@ +import check from '../util/regex-check.mjs' + +export default check(/\s/) diff --git a/node_modules/micromark/lib/character/values.d.ts b/node_modules/micromark/lib/character/values.d.ts new file mode 100644 index 00000000..1fbda37a --- /dev/null +++ b/node_modules/micromark/lib/character/values.d.ts @@ -0,0 +1,210 @@ +// This module is generated by `script/`. + +export type Value = + | '\t' + | '\n' + | '\r' + | ' ' + | '!' + | '"' + | '#' + | '$' + | '%' + | '&' + | "'" + | '(' + | ')' + | '*' + | '+' + | ',' + | '-' + | '.' + | '/' + | '0' + | '1' + | '2' + | '3' + | '4' + | '5' + | '6' + | '7' + | '8' + | '9' + | ':' + | ';' + | '<' + | '=' + | '>' + | '?' + | '@' + | 'A' + | 'B' + | 'C' + | 'D' + | 'E' + | 'F' + | 'G' + | 'H' + | 'I' + | 'J' + | 'K' + | 'L' + | 'M' + | 'N' + | 'O' + | 'P' + | 'Q' + | 'R' + | 'S' + | 'T' + | 'U' + | 'V' + | 'W' + | 'X' + | 'Y' + | 'Z' + | '[' + | '\\' + | ']' + | '^' + | '_' + | '`' + | 'a' + | 'b' + | 'c' + | 'd' + | 'e' + | 'f' + | 'g' + | 'h' + | 'i' + | 'j' + | 'k' + | 'l' + | 'm' + | 'n' + | 'o' + | 'p' + | 'q' + | 'r' + | 's' + | 't' + | 'u' + | 'v' + | 'w' + | 'x' + | 'y' + | 'z' + | '{' + | '|' + | '}' + | '~' + | '�' + +// @for-script: REMOVE_ALL_THING_BELOW + +export interface Values { + ht: '\t' + lf: '\n' + cr: '\r' + space: ' ' + exclamationMark: '!' + quotationMark: '"' + numberSign: '#' + dollarSign: '$' + percentSign: '%' + ampersand: '&' + apostrophe: "'" + leftParenthesis: '(' + rightParenthesis: ')' + asterisk: '*' + plusSign: '+' + comma: ',' + dash: '-' + dot: '.' + slash: '/' + digit0: '0' + digit1: '1' + digit2: '2' + digit3: '3' + digit4: '4' + digit5: '5' + digit6: '6' + digit7: '7' + digit8: '8' + digit9: '9' + colon: ':' + semicolon: ';' + lessThan: '<' + equalsTo: '=' + greaterThan: '>' + questionMark: '?' 
+ atSign: '@' + uppercaseA: 'A' + uppercaseB: 'B' + uppercaseC: 'C' + uppercaseD: 'D' + uppercaseE: 'E' + uppercaseF: 'F' + uppercaseG: 'G' + uppercaseH: 'H' + uppercaseI: 'I' + uppercaseJ: 'J' + uppercaseK: 'K' + uppercaseL: 'L' + uppercaseM: 'M' + uppercaseN: 'N' + uppercaseO: 'O' + uppercaseP: 'P' + uppercaseQ: 'Q' + uppercaseR: 'R' + uppercaseS: 'S' + uppercaseT: 'T' + uppercaseU: 'U' + uppercaseV: 'V' + uppercaseW: 'W' + uppercaseX: 'X' + uppercaseY: 'Y' + uppercaseZ: 'Z' + leftSquareBracket: '[' + backslash: '\\' + rightSquareBracket: ']' + caret: '^' + underscore: '_' + graveAccent: '`' + lowercaseA: 'a' + lowercaseB: 'b' + lowercaseC: 'c' + lowercaseD: 'd' + lowercaseE: 'e' + lowercaseF: 'f' + lowercaseG: 'g' + lowercaseH: 'h' + lowercaseI: 'i' + lowercaseJ: 'j' + lowercaseK: 'k' + lowercaseL: 'l' + lowercaseM: 'm' + lowercaseN: 'n' + lowercaseO: 'o' + lowercaseP: 'p' + lowercaseQ: 'q' + lowercaseR: 'r' + lowercaseS: 's' + lowercaseT: 't' + lowercaseU: 'u' + lowercaseV: 'v' + lowercaseW: 'w' + lowercaseX: 'x' + lowercaseY: 'y' + lowercaseZ: 'z' + leftCurlyBrace: '{' + verticalBar: '|' + rightCurlyBrace: '}' + tilde: '~' + replacementCharacter: '�' +} + +declare const value: Values + +export default value diff --git a/node_modules/micromark/lib/character/values.js b/node_modules/micromark/lib/character/values.js new file mode 100644 index 00000000..cd1794fd --- /dev/null +++ b/node_modules/micromark/lib/character/values.js @@ -0,0 +1,111 @@ +'use strict' + +// This module is compiled away! +// +// While micromark works based on character codes, this module includes the +// string versions of ’em. +// The C0 block, except for LF, CR, HT, and w/ the replacement character added, +// are available here. +var values = { + ht: '\t', + lf: '\n', + cr: '\r', + space: ' ', + exclamationMark: '!', + quotationMark: '"', + numberSign: '#', + dollarSign: '$', + percentSign: '%', + ampersand: '&', + apostrophe: "'", + leftParenthesis: '(', + rightParenthesis: ')', + asterisk: '*', + plusSign: '+', + comma: ',', + dash: '-', + dot: '.', + slash: '/', + digit0: '0', + digit1: '1', + digit2: '2', + digit3: '3', + digit4: '4', + digit5: '5', + digit6: '6', + digit7: '7', + digit8: '8', + digit9: '9', + colon: ':', + semicolon: ';', + lessThan: '<', + equalsTo: '=', + greaterThan: '>', + questionMark: '?', + atSign: '@', + uppercaseA: 'A', + uppercaseB: 'B', + uppercaseC: 'C', + uppercaseD: 'D', + uppercaseE: 'E', + uppercaseF: 'F', + uppercaseG: 'G', + uppercaseH: 'H', + uppercaseI: 'I', + uppercaseJ: 'J', + uppercaseK: 'K', + uppercaseL: 'L', + uppercaseM: 'M', + uppercaseN: 'N', + uppercaseO: 'O', + uppercaseP: 'P', + uppercaseQ: 'Q', + uppercaseR: 'R', + uppercaseS: 'S', + uppercaseT: 'T', + uppercaseU: 'U', + uppercaseV: 'V', + uppercaseW: 'W', + uppercaseX: 'X', + uppercaseY: 'Y', + uppercaseZ: 'Z', + leftSquareBracket: '[', + backslash: '\\', + rightSquareBracket: ']', + caret: '^', + underscore: '_', + graveAccent: '`', + lowercaseA: 'a', + lowercaseB: 'b', + lowercaseC: 'c', + lowercaseD: 'd', + lowercaseE: 'e', + lowercaseF: 'f', + lowercaseG: 'g', + lowercaseH: 'h', + lowercaseI: 'i', + lowercaseJ: 'j', + lowercaseK: 'k', + lowercaseL: 'l', + lowercaseM: 'm', + lowercaseN: 'n', + lowercaseO: 'o', + lowercaseP: 'p', + lowercaseQ: 'q', + lowercaseR: 'r', + lowercaseS: 's', + lowercaseT: 't', + lowercaseU: 'u', + lowercaseV: 'v', + lowercaseW: 'w', + lowercaseX: 'x', + lowercaseY: 'y', + lowercaseZ: 'z', + leftCurlyBrace: '{', + verticalBar: '|', + rightCurlyBrace: '}', + tilde: '~', + 
replacementCharacter: '�' +} + +module.exports = values diff --git a/node_modules/micromark/lib/character/values.mjs b/node_modules/micromark/lib/character/values.mjs new file mode 100644 index 00000000..bc0be3fe --- /dev/null +++ b/node_modules/micromark/lib/character/values.mjs @@ -0,0 +1,107 @@ +// This module is compiled away! +// +// While micromark works based on character codes, this module includes the +// string versions of ’em. +// The C0 block, except for LF, CR, HT, and w/ the replacement character added, +// are available here. +export default { + ht: '\t', + lf: '\n', + cr: '\r', + space: ' ', + exclamationMark: '!', + quotationMark: '"', + numberSign: '#', + dollarSign: '$', + percentSign: '%', + ampersand: '&', + apostrophe: "'", + leftParenthesis: '(', + rightParenthesis: ')', + asterisk: '*', + plusSign: '+', + comma: ',', + dash: '-', + dot: '.', + slash: '/', + digit0: '0', + digit1: '1', + digit2: '2', + digit3: '3', + digit4: '4', + digit5: '5', + digit6: '6', + digit7: '7', + digit8: '8', + digit9: '9', + colon: ':', + semicolon: ';', + lessThan: '<', + equalsTo: '=', + greaterThan: '>', + questionMark: '?', + atSign: '@', + uppercaseA: 'A', + uppercaseB: 'B', + uppercaseC: 'C', + uppercaseD: 'D', + uppercaseE: 'E', + uppercaseF: 'F', + uppercaseG: 'G', + uppercaseH: 'H', + uppercaseI: 'I', + uppercaseJ: 'J', + uppercaseK: 'K', + uppercaseL: 'L', + uppercaseM: 'M', + uppercaseN: 'N', + uppercaseO: 'O', + uppercaseP: 'P', + uppercaseQ: 'Q', + uppercaseR: 'R', + uppercaseS: 'S', + uppercaseT: 'T', + uppercaseU: 'U', + uppercaseV: 'V', + uppercaseW: 'W', + uppercaseX: 'X', + uppercaseY: 'Y', + uppercaseZ: 'Z', + leftSquareBracket: '[', + backslash: '\\', + rightSquareBracket: ']', + caret: '^', + underscore: '_', + graveAccent: '`', + lowercaseA: 'a', + lowercaseB: 'b', + lowercaseC: 'c', + lowercaseD: 'd', + lowercaseE: 'e', + lowercaseF: 'f', + lowercaseG: 'g', + lowercaseH: 'h', + lowercaseI: 'i', + lowercaseJ: 'j', + lowercaseK: 'k', + lowercaseL: 'l', + lowercaseM: 'm', + lowercaseN: 'n', + lowercaseO: 'o', + lowercaseP: 'p', + lowercaseQ: 'q', + lowercaseR: 'r', + lowercaseS: 's', + lowercaseT: 't', + lowercaseU: 'u', + lowercaseV: 'v', + lowercaseW: 'w', + lowercaseX: 'x', + lowercaseY: 'y', + lowercaseZ: 'z', + leftCurlyBrace: '{', + verticalBar: '|', + rightCurlyBrace: '}', + tilde: '~', + replacementCharacter: '�' +} diff --git a/node_modules/micromark/lib/compile/html.js b/node_modules/micromark/lib/compile/html.js new file mode 100644 index 00000000..ab6874b3 --- /dev/null +++ b/node_modules/micromark/lib/compile/html.js @@ -0,0 +1,810 @@ +'use strict' + +var decodeEntity = require('parse-entities/decode-entity.js') +var codes = require('../character/codes.js') +var assign = require('../constant/assign.js') +var constants = require('../constant/constants.js') +var hasOwnProperty = require('../constant/has-own-property.js') +var types = require('../constant/types.js') +var combineHtmlExtensions = require('../util/combine-html-extensions.js') +var chunkedPush = require('../util/chunked-push.js') +var miniflat = require('../util/miniflat.js') +var normalizeIdentifier = require('../util/normalize-identifier.js') +var normalizeUri = require('../util/normalize-uri.js') +var safeFromInt = require('../util/safe-from-int.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var decodeEntity__default = /*#__PURE__*/ _interopDefaultLegacy(decodeEntity) + +// While micromark is a lexer/tokenizer, the common case of going from markdown + +// This ensures that certain characters which have special meaning in HTML are +// dealt with. +// Technically, we can skip `>` and `"` in many cases, but CM includes them. +var characterReferences = {'"': 'quot', '&': 'amp', '<': 'lt', '>': 'gt'} + +// These two are allowlists of essentially safe protocols for full URLs in +// respectively the `href` (on `<a>`) and `src` (on `<img>`) attributes. +// They are based on what is allowed on GitHub, +// <https://github.com/syntax-tree/hast-util-sanitize/blob/9275b21/lib/github.json#L31> +var protocolHref = /^(https?|ircs?|mailto|xmpp)$/i +var protocolSrc = /^https?$/i + +function compileHtml(options) { + // Configuration. + // Includes `htmlExtensions` (an array of extensions), `defaultLineEnding` (a + // preferred EOL), `allowDangerousProtocol` (whether to allow potential + // dangerous protocols), and `allowDangerousHtml` (whether to allow potential + // dangerous HTML). + var settings = options || {} + // Tags is needed because according to markdown, links and emphasis and + // whatnot can exist in images, however, as HTML doesn’t allow content in + // images, the tags are ignored in the `alt` attribute, but the content + // remains. + var tags = true + // An object to track identifiers to media (URLs and titles) defined with + // definitions. + var definitions = {} + // A lot of the handlers need to capture some of the output data, modify it + // somehow, and then deal with it. + // We do that by tracking a stack of buffers, that can be opened (with + // `buffer`) and closed (with `resume`) to access them. + var buffers = [[]] + // As we can have links in images and the other way around, where the deepest + // ones are closed first, we need to track which one we’re in. + var mediaStack = [] + // Same for tightness, which is specific to lists. + // We need to track if we’re currently in a tight or loose container. 
+ var tightStack = [] + + var defaultHandlers = { + enter: { + blockQuote: onenterblockquote, + codeFenced: onentercodefenced, + codeFencedFenceInfo: buffer, + codeFencedFenceMeta: buffer, + codeIndented: onentercodeindented, + codeText: onentercodetext, + content: onentercontent, + definition: onenterdefinition, + definitionDestinationString: onenterdefinitiondestinationstring, + definitionLabelString: buffer, + definitionTitleString: buffer, + emphasis: onenteremphasis, + htmlFlow: onenterhtmlflow, + htmlText: onenterhtml, + image: onenterimage, + label: buffer, + link: onenterlink, + listItemMarker: onenterlistitemmarker, + listItemValue: onenterlistitemvalue, + listOrdered: onenterlistordered, + listUnordered: onenterlistunordered, + paragraph: onenterparagraph, + reference: buffer, + resource: onenterresource, + resourceDestinationString: onenterresourcedestinationstring, + resourceTitleString: buffer, + setextHeading: onentersetextheading, + strong: onenterstrong + }, + exit: { + atxHeading: onexitatxheading, + atxHeadingSequence: onexitatxheadingsequence, + autolinkEmail: onexitautolinkemail, + autolinkProtocol: onexitautolinkprotocol, + blockQuote: onexitblockquote, + characterEscapeValue: onexitdata, + characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker, + characterReferenceMarkerNumeric: onexitcharacterreferencemarker, + characterReferenceValue: onexitcharacterreferencevalue, + codeFenced: onexitflowcode, + codeFencedFence: onexitcodefencedfence, + codeFencedFenceInfo: onexitcodefencedfenceinfo, + codeFencedFenceMeta: resume, + codeFlowValue: onexitcodeflowvalue, + codeIndented: onexitflowcode, + codeText: onexitcodetext, + codeTextData: onexitdata, + data: onexitdata, + definition: onexitdefinition, + definitionDestinationString: onexitdefinitiondestinationstring, + definitionLabelString: onexitdefinitionlabelstring, + definitionTitleString: onexitdefinitiontitlestring, + emphasis: onexitemphasis, + hardBreakEscape: onexithardbreak, + hardBreakTrailing: onexithardbreak, + htmlFlow: onexithtml, + htmlFlowData: onexitdata, + htmlText: onexithtml, + htmlTextData: onexitdata, + image: onexitmedia, + label: onexitlabel, + labelText: onexitlabeltext, + lineEnding: onexitlineending, + link: onexitmedia, + listOrdered: onexitlistordered, + listUnordered: onexitlistunordered, + paragraph: onexitparagraph, + reference: resume, + referenceString: onexitreferencestring, + resource: resume, + resourceDestinationString: onexitresourcedestinationstring, + resourceTitleString: onexitresourcetitlestring, + setextHeading: onexitsetextheading, + setextHeadingLineSequence: onexitsetextheadinglinesequence, + setextHeadingText: onexitsetextheadingtext, + strong: onexitstrong, + thematicBreak: onexitthematicbreak + } + } + + // Combine the HTML extensions with the default handlers. + // An HTML extension is an object whose fields are either `enter` or `exit` + // (reflecting whether a token is entered or exited). + // The values at such objects are names of tokens mapping to handlers. + // Handlers are called, respectively when a token is opener or closed, with + // that token, and a context as `this`. + var handlers = combineHtmlExtensions( + [defaultHandlers].concat(miniflat(settings.htmlExtensions)) + ) + + // Handlers do often need to keep track of some state. + // That state is provided here as a key-value store (an object). + var data = {tightStack: tightStack} + + // The context for handlers references a couple of useful functions. 
+ // In handlers from extensions, those can be accessed at `this`. + // For the handlers here, they can be accessed directly. + var context = { + lineEndingIfNeeded: lineEndingIfNeeded, + options: settings, + encode: encode, + raw: raw, + tag: tag, + buffer: buffer, + resume: resume, + setData: setData, + getData: getData + } + + // Generally, micromark copies line endings (`'\r'`, `'\n'`, `'\r\n'`) in the + // markdown document over to the compiled HTML. + // In some cases, such as `> a`, CommonMark requires that extra line endings + // are added: `<blockquote>\n<p>a</p>\n</blockquote>`. + // This variable hold the default line ending when given (or `undefined`), + // and in the latter case will be updated to the first found line ending if + // there is one. + var lineEndingStyle = settings.defaultLineEnding + + // Return the function that handles a slice of events. + return compile + + // Deal w/ a slice of events. + // Return either the empty string if there’s nothing of note to return, or the + // result when done. + function compile(events) { + // As definitions can come after references, we need to figure out the media + // (urls and titles) defined by them before handling the references. + // So, we do sort of what HTML does: put metadata at the start (in head), and + // then put content after (`body`). + var head = [] + var body = [] + var index + var start + var listStack + var handler + var result + + index = -1 + start = 0 + listStack = [] + + while (++index < events.length) { + // Figure out the line ending style used in the document. + if ( + !lineEndingStyle && + (events[index][1].type === types.lineEnding || + events[index][1].type === types.lineEndingBlank) + ) { + lineEndingStyle = events[index][2].sliceSerialize(events[index][1]) + } + + // Preprocess lists to infer whether the list is loose or not. + if ( + events[index][1].type === types.listOrdered || + events[index][1].type === types.listUnordered + ) { + if (events[index][0] === 'enter') { + listStack.push(index) + } else { + prepareList(events.slice(listStack.pop(), index)) + } + } + + // Move definitions to the front. + if (events[index][1].type === types.definition) { + if (events[index][0] === 'enter') { + body = chunkedPush(body, events.slice(start, index)) + start = index + } else { + head = chunkedPush(head, events.slice(start, index + 1)) + start = index + 1 + } + } + } + + head = chunkedPush(head, body) + head = chunkedPush(head, events.slice(start)) + result = head + index = -1 + + // Handle the start of the document, if defined. + if (handlers.enter.null) { + handlers.enter.null.call(context) + } + + // Handle all events. + while (++index < events.length) { + handler = handlers[result[index][0]] + + if (hasOwnProperty.call(handler, result[index][1].type)) { + handler[result[index][1].type].call( + assign({sliceSerialize: result[index][2].sliceSerialize}, context), + result[index][1] + ) + } + } + + // Handle the end of the document, if defined. + if (handlers.exit.null) { + handlers.exit.null.call(context) + } + + return buffers[0].join('') + } + + // Figure out whether lists are loose or not. + function prepareList(slice) { + var length = slice.length - 1 // Skip close. + var index = 0 // Skip open. 
+ var containerBalance = 0 + var loose + var atMarker + var event + + while (++index < length) { + event = slice[index] + + if (event[1]._container) { + atMarker = undefined + + if (event[0] === 'enter') { + containerBalance++ + } else { + containerBalance-- + } + } else if (event[1].type === types.listItemPrefix) { + if (event[0] === 'exit') { + atMarker = true + } + } else if (event[1].type === types.linePrefix); + else if (event[1].type === types.lineEndingBlank) { + if (event[0] === 'enter' && !containerBalance) { + if (atMarker) { + atMarker = undefined + } else { + loose = true + } + } + } else { + atMarker = undefined + } + } + + slice[0][1]._loose = loose + } + + // Set data into the key-value store. + function setData(key, value) { + data[key] = value + } + + // Get data from the key-value store. + function getData(key) { + return data[key] + } + + // Capture some of the output data. + function buffer() { + buffers.push([]) + } + + // Stop capturing and access the output data. + function resume() { + return buffers.pop().join('') + } + + // Output (parts of) HTML tags. + function tag(value) { + if (!tags) return + setData('lastWasTag', true) + buffers[buffers.length - 1].push(value) + } + + // Output raw data. + function raw(value) { + setData('lastWasTag') + buffers[buffers.length - 1].push(value) + } + + // Output an extra line ending. + function lineEnding() { + raw(lineEndingStyle || '\n') + } + + // Output an extra line ending if the previous value wasn’t EOF/EOL. + function lineEndingIfNeeded() { + var buffer = buffers[buffers.length - 1] + var slice = buffer[buffer.length - 1] + var previous = slice ? slice.charCodeAt(slice.length - 1) : codes.eof + + if ( + previous === codes.lf || + previous === codes.cr || + previous === codes.eof + ) { + return + } + + lineEnding() + } + + // Make a value safe for injection in HTML (except w/ `ignoreEncode`). + function encode(value) { + return getData('ignoreEncode') ? value : value.replace(/["&<>]/g, replace) + function replace(value) { + return '&' + characterReferences[value] + ';' + } + } + + // Make a value safe for injection as a URL. + // This does encode unsafe characters with percent-encoding, skipping already + // encoded sequences (`normalizeUri`). + // Further unsafe characters are encoded as character references (`encode`). + // Finally, if the URL includes an unknown protocol (such as a dangerous + // example, `javascript:`), the value is ignored. + function url(url, protocol) { + var value = encode(normalizeUri(url || '')) + var colon = value.indexOf(':') + var questionMark = value.indexOf('?') + var numberSign = value.indexOf('#') + var slash = value.indexOf('/') + + if ( + settings.allowDangerousProtocol || + // If there is no protocol, it’s relative. + colon < 0 || + // If the first colon is after a `?`, `#`, or `/`, it’s not a protocol. + (slash > -1 && colon > slash) || + (questionMark > -1 && colon > questionMark) || + (numberSign > -1 && colon > numberSign) || + // It is a protocol, it should be allowed. + protocol.test(value.slice(0, colon)) + ) { + return value + } + + return '' + } + + // + // Handlers. 
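The comments above and the defaultHandlers object define the shape an htmlExtensions entry must have: enter/exit maps from token names to handlers, each called with the compiler context as this. A minimal, hypothetical sketch of that shape; myToken is an invented token name (a matching syntax extension would have to produce it), not something core micromark emits:

var myHtmlExtension = {
  enter: {
    myToken: function () {
      this.buffer() // start capturing whatever is compiled inside the token
    }
  },
  exit: {
    myToken: function (token) {
      var inner = this.resume() // stop capturing and take the buffered output
      this.tag('<mark>')
      this.raw(inner || this.encode(this.sliceSerialize(token)))
      this.tag('</mark>')
    }
  }
}
// Wired up as: micromark(doc, {extensions: [/* syntax extension */], htmlExtensions: [myHtmlExtension]})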
+ // + + function onenterlistordered(token) { + tightStack.push(!token._loose) + lineEndingIfNeeded() + tag('<ol') + setData('expectFirstItem', true) + } + + function onenterlistunordered(token) { + tightStack.push(!token._loose) + lineEndingIfNeeded() + tag('<ul') + setData('expectFirstItem', true) + } + + function onenterlistitemvalue(token) { + var value + + if (getData('expectFirstItem')) { + value = parseInt(this.sliceSerialize(token), constants.numericBaseDecimal) + + if (value !== 1) { + tag(' start="' + encode(String(value)) + '"') + } + } + } + + function onenterlistitemmarker() { + if (getData('expectFirstItem')) { + tag('>') + } else { + onexitlistitem() + } + + lineEndingIfNeeded() + tag('<li>') + setData('expectFirstItem') + // “Hack” to prevent a line ending from showing up if the item is empty. + setData('lastWasTag') + } + + function onexitlistordered() { + onexitlistitem() + tightStack.pop() + lineEnding() + tag('</ol>') + } + + function onexitlistunordered() { + onexitlistitem() + tightStack.pop() + lineEnding() + tag('</ul>') + } + + function onexitlistitem() { + if (getData('lastWasTag') && !getData('slurpAllLineEndings')) { + lineEndingIfNeeded() + } + + tag('</li>') + setData('slurpAllLineEndings') + } + + function onenterblockquote() { + tightStack.push(false) + lineEndingIfNeeded() + tag('<blockquote>') + } + + function onexitblockquote() { + tightStack.pop() + lineEndingIfNeeded() + tag('</blockquote>') + setData('slurpAllLineEndings') + } + + function onenterparagraph() { + if (!tightStack[tightStack.length - 1]) { + lineEndingIfNeeded() + tag('<p>') + } + + setData('slurpAllLineEndings') + } + + function onexitparagraph() { + if (tightStack[tightStack.length - 1]) { + setData('slurpAllLineEndings', true) + } else { + tag('</p>') + } + } + + function onentercodefenced() { + lineEndingIfNeeded() + tag('<pre><code') + setData('fencesCount', 0) + } + + function onexitcodefencedfenceinfo() { + var value = resume() + tag(' class="language-' + value + '"') + } + + function onexitcodefencedfence() { + if (!getData('fencesCount')) { + tag('>') + setData('fencedCodeInside', true) + setData('slurpOneLineEnding', true) + } + + setData('fencesCount', getData('fencesCount') + 1) + } + + function onentercodeindented() { + lineEndingIfNeeded() + tag('<pre><code>') + } + + function onexitflowcode() { + // Send an extra line feed if we saw data. + if (getData('flowCodeSeenData')) lineEndingIfNeeded() + tag('</code></pre>') + if (getData('fencesCount') < 2) lineEndingIfNeeded() + setData('flowCodeSeenData') + setData('fencesCount') + setData('slurpOneLineEnding') + } + + function onenterimage() { + mediaStack.push({image: true}) + tags = undefined // Disallow tags. + } + + function onenterlink() { + mediaStack.push({}) + } + + function onexitlabeltext(token) { + mediaStack[mediaStack.length - 1].labelId = this.sliceSerialize(token) + } + + function onexitlabel() { + mediaStack[mediaStack.length - 1].label = resume() + } + + function onexitreferencestring(token) { + mediaStack[mediaStack.length - 1].referenceId = this.sliceSerialize(token) + } + + function onenterresource() { + buffer() // We can have line endings in the resource, ignore them. + mediaStack[mediaStack.length - 1].destination = '' + } + + function onenterresourcedestinationstring() { + buffer() + // Ignore encoding the result, as we’ll first percent encode the url and + // encode manually after. 
+ setData('ignoreEncode', true) + } + + function onexitresourcedestinationstring() { + mediaStack[mediaStack.length - 1].destination = resume() + setData('ignoreEncode') + } + + function onexitresourcetitlestring() { + mediaStack[mediaStack.length - 1].title = resume() + } + + function onexitmedia() { + var index = mediaStack.length - 1 // Skip current. + var media = mediaStack[index] + var context = + media.destination === undefined + ? definitions[normalizeIdentifier(media.referenceId || media.labelId)] + : media + + tags = true + + while (index--) { + if (mediaStack[index].image) { + tags = undefined + break + } + } + + if (media.image) { + tag('<img src="' + url(context.destination, protocolSrc) + '" alt="') + raw(media.label) + tag('"') + } else { + tag('<a href="' + url(context.destination, protocolHref) + '"') + } + + tag(context.title ? ' title="' + context.title + '"' : '') + + if (media.image) { + tag(' />') + } else { + tag('>') + raw(media.label) + tag('</a>') + } + + mediaStack.pop() + } + + function onenterdefinition() { + buffer() + mediaStack.push({}) + } + + function onexitdefinitionlabelstring(token) { + // Discard label, use the source content instead. + resume() + mediaStack[mediaStack.length - 1].labelId = this.sliceSerialize(token) + } + + function onenterdefinitiondestinationstring() { + buffer() + setData('ignoreEncode', true) + } + + function onexitdefinitiondestinationstring() { + mediaStack[mediaStack.length - 1].destination = resume() + setData('ignoreEncode') + } + + function onexitdefinitiontitlestring() { + mediaStack[mediaStack.length - 1].title = resume() + } + + function onexitdefinition() { + var id = normalizeIdentifier(mediaStack[mediaStack.length - 1].labelId) + + resume() + + if (!hasOwnProperty.call(definitions, id)) { + definitions[id] = mediaStack[mediaStack.length - 1] + } + + mediaStack.pop() + } + + function onentercontent() { + setData('slurpAllLineEndings', true) + } + + function onexitatxheadingsequence(token) { + // Exit for further sequences. + if (getData('headingRank')) return + setData('headingRank', this.sliceSerialize(token).length) + lineEndingIfNeeded() + tag('<h' + getData('headingRank') + '>') + } + + function onentersetextheading() { + buffer() + setData('slurpAllLineEndings') + } + + function onexitsetextheadingtext() { + setData('slurpAllLineEndings', true) + } + + function onexitatxheading() { + tag('</h' + getData('headingRank') + '>') + setData('headingRank') + } + + function onexitsetextheadinglinesequence(token) { + setData( + 'headingRank', + this.sliceSerialize(token).charCodeAt(0) === codes.equalsTo ? 
1 : 2 + ) + } + + function onexitsetextheading() { + var value = resume() + lineEndingIfNeeded() + tag('<h' + getData('headingRank') + '>') + raw(value) + tag('</h' + getData('headingRank') + '>') + setData('slurpAllLineEndings') + setData('headingRank') + } + + function onexitdata(token) { + raw(encode(this.sliceSerialize(token))) + } + + function onexitlineending(token) { + if (getData('slurpAllLineEndings')) { + return + } + + if (getData('slurpOneLineEnding')) { + setData('slurpOneLineEnding') + return + } + + if (getData('inCodeText')) { + raw(' ') + return + } + + raw(encode(this.sliceSerialize(token))) + } + + function onexitcodeflowvalue(token) { + raw(encode(this.sliceSerialize(token))) + setData('flowCodeSeenData', true) + } + + function onexithardbreak() { + tag('<br />') + } + + function onenterhtmlflow() { + lineEndingIfNeeded() + onenterhtml() + } + + function onexithtml() { + setData('ignoreEncode') + } + + function onenterhtml() { + if (settings.allowDangerousHtml) { + setData('ignoreEncode', true) + } + } + + function onenteremphasis() { + tag('<em>') + } + + function onenterstrong() { + tag('<strong>') + } + + function onentercodetext() { + setData('inCodeText', true) + tag('<code>') + } + + function onexitcodetext() { + setData('inCodeText') + tag('</code>') + } + + function onexitemphasis() { + tag('</em>') + } + + function onexitstrong() { + tag('</strong>') + } + + function onexitthematicbreak() { + lineEndingIfNeeded() + tag('<hr />') + } + + function onexitcharacterreferencemarker(token) { + setData('characterReferenceType', token.type) + } + + function onexitcharacterreferencevalue(token) { + var value = this.sliceSerialize(token) + + value = getData('characterReferenceType') + ? safeFromInt( + value, + getData('characterReferenceType') === + types.characterReferenceMarkerNumeric + ? constants.numericBaseDecimal + : constants.numericBaseHexadecimal + ) + : decodeEntity__default['default'](value) + + raw(encode(value)) + setData('characterReferenceType') + } + + function onexitautolinkprotocol(token) { + var uri = this.sliceSerialize(token) + tag('<a href="' + url(uri, protocolHref) + '">') + raw(encode(uri)) + tag('</a>') + } + + function onexitautolinkemail(token) { + var uri = this.sliceSerialize(token) + tag('<a href="' + url('mailto:' + uri, protocolHref) + '">') + raw(encode(uri)) + tag('</a>') + } +} + +module.exports = compileHtml diff --git a/node_modules/micromark/lib/compile/html.mjs b/node_modules/micromark/lib/compile/html.mjs new file mode 100644 index 00000000..3cfbe1a6 --- /dev/null +++ b/node_modules/micromark/lib/compile/html.mjs @@ -0,0 +1,813 @@ +// While micromark is a lexer/tokenizer, the common case of going from markdown +// to html is currently built in as this module, even though the parts can be +// used separately to build ASTs, CSTs, or many other output formats. +// +// Having an HTML compiler built in is useful because it allows us to check for +// compliancy to CommonMark, the de facto norm of markdown, specified in roughly +// 600 input/output cases. +// +// This module has an interface which accepts lists of events instead of the +// whole at once, however, because markdown can’t be truly streaming, we buffer +// events before processing and outputting the final result. 
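The character-reference handlers above (onexitcharacterreferencemarker and onexitcharacterreferencevalue) route numeric references through safeFromInt and named references through parse-entities' decodeEntity, then escape the result with encode. Assuming the package's buffering entry point behaves as usual, the expected effect is:

var micromark = require('micromark')
console.log(micromark('&#65; &amp; &#x3C;'))
// expected: '<p>A &amp; &lt;</p>'
// decimal 65 becomes 'A', named 'amp' becomes '&', hex 3C becomes '<',
// and the last two are re-escaped by encode()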
+ +export default compileHtml + +import decodeEntity from 'parse-entities/decode-entity.js' +import codes from '../character/codes.mjs' +import assign from '../constant/assign.mjs' +import constants from '../constant/constants.mjs' +import own from '../constant/has-own-property.mjs' +import types from '../constant/types.mjs' +import combineHtmlExtensions from '../util/combine-html-extensions.mjs' +import chunkedPush from '../util/chunked-push.mjs' +import miniflat from '../util/miniflat.mjs' +import normalizeIdentifier from '../util/normalize-identifier.mjs' +import normalizeUri from '../util/normalize-uri.mjs' +import safeFromInt from '../util/safe-from-int.mjs' + +// This ensures that certain characters which have special meaning in HTML are +// dealt with. +// Technically, we can skip `>` and `"` in many cases, but CM includes them. +var characterReferences = {'"': 'quot', '&': 'amp', '<': 'lt', '>': 'gt'} + +// These two are allowlists of essentially safe protocols for full URLs in +// respectively the `href` (on `<a>`) and `src` (on `<img>`) attributes. +// They are based on what is allowed on GitHub, +// <https://github.com/syntax-tree/hast-util-sanitize/blob/9275b21/lib/github.json#L31> +var protocolHref = /^(https?|ircs?|mailto|xmpp)$/i +var protocolSrc = /^https?$/i + +function compileHtml(options) { + // Configuration. + // Includes `htmlExtensions` (an array of extensions), `defaultLineEnding` (a + // preferred EOL), `allowDangerousProtocol` (whether to allow potential + // dangerous protocols), and `allowDangerousHtml` (whether to allow potential + // dangerous HTML). + var settings = options || {} + // Tags is needed because according to markdown, links and emphasis and + // whatnot can exist in images, however, as HTML doesn’t allow content in + // images, the tags are ignored in the `alt` attribute, but the content + // remains. + var tags = true + // An object to track identifiers to media (URLs and titles) defined with + // definitions. + var definitions = {} + // A lot of the handlers need to capture some of the output data, modify it + // somehow, and then deal with it. + // We do that by tracking a stack of buffers, that can be opened (with + // `buffer`) and closed (with `resume`) to access them. + var buffers = [[]] + // As we can have links in images and the other way around, where the deepest + // ones are closed first, we need to track which one we’re in. + var mediaStack = [] + // Same for tightness, which is specific to lists. + // We need to track if we’re currently in a tight or loose container. 
+ var tightStack = [] + + var defaultHandlers = { + enter: { + blockQuote: onenterblockquote, + codeFenced: onentercodefenced, + codeFencedFenceInfo: buffer, + codeFencedFenceMeta: buffer, + codeIndented: onentercodeindented, + codeText: onentercodetext, + content: onentercontent, + definition: onenterdefinition, + definitionDestinationString: onenterdefinitiondestinationstring, + definitionLabelString: buffer, + definitionTitleString: buffer, + emphasis: onenteremphasis, + htmlFlow: onenterhtmlflow, + htmlText: onenterhtml, + image: onenterimage, + label: buffer, + link: onenterlink, + listItemMarker: onenterlistitemmarker, + listItemValue: onenterlistitemvalue, + listOrdered: onenterlistordered, + listUnordered: onenterlistunordered, + paragraph: onenterparagraph, + reference: buffer, + resource: onenterresource, + resourceDestinationString: onenterresourcedestinationstring, + resourceTitleString: buffer, + setextHeading: onentersetextheading, + strong: onenterstrong + }, + exit: { + atxHeading: onexitatxheading, + atxHeadingSequence: onexitatxheadingsequence, + autolinkEmail: onexitautolinkemail, + autolinkProtocol: onexitautolinkprotocol, + blockQuote: onexitblockquote, + characterEscapeValue: onexitdata, + characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker, + characterReferenceMarkerNumeric: onexitcharacterreferencemarker, + characterReferenceValue: onexitcharacterreferencevalue, + codeFenced: onexitflowcode, + codeFencedFence: onexitcodefencedfence, + codeFencedFenceInfo: onexitcodefencedfenceinfo, + codeFencedFenceMeta: resume, + codeFlowValue: onexitcodeflowvalue, + codeIndented: onexitflowcode, + codeText: onexitcodetext, + codeTextData: onexitdata, + data: onexitdata, + definition: onexitdefinition, + definitionDestinationString: onexitdefinitiondestinationstring, + definitionLabelString: onexitdefinitionlabelstring, + definitionTitleString: onexitdefinitiontitlestring, + emphasis: onexitemphasis, + hardBreakEscape: onexithardbreak, + hardBreakTrailing: onexithardbreak, + htmlFlow: onexithtml, + htmlFlowData: onexitdata, + htmlText: onexithtml, + htmlTextData: onexitdata, + image: onexitmedia, + label: onexitlabel, + labelText: onexitlabeltext, + lineEnding: onexitlineending, + link: onexitmedia, + listOrdered: onexitlistordered, + listUnordered: onexitlistunordered, + paragraph: onexitparagraph, + reference: resume, + referenceString: onexitreferencestring, + resource: resume, + resourceDestinationString: onexitresourcedestinationstring, + resourceTitleString: onexitresourcetitlestring, + setextHeading: onexitsetextheading, + setextHeadingLineSequence: onexitsetextheadinglinesequence, + setextHeadingText: onexitsetextheadingtext, + strong: onexitstrong, + thematicBreak: onexitthematicbreak + } + } + + // Combine the HTML extensions with the default handlers. + // An HTML extension is an object whose fields are either `enter` or `exit` + // (reflecting whether a token is entered or exited). + // The values at such objects are names of tokens mapping to handlers. + // Handlers are called, respectively when a token is opener or closed, with + // that token, and a context as `this`. + var handlers = combineHtmlExtensions( + [defaultHandlers].concat(miniflat(settings.htmlExtensions)) + ) + + // Handlers do often need to keep track of some state. + // That state is provided here as a key-value store (an object). + var data = {tightStack: tightStack} + + // The context for handlers references a couple of useful functions. 
+ // In handlers from extensions, those can be accessed at `this`. + // For the handlers here, they can be accessed directly. + var context = { + lineEndingIfNeeded: lineEndingIfNeeded, + options: settings, + encode: encode, + raw: raw, + tag: tag, + buffer: buffer, + resume: resume, + setData: setData, + getData: getData + } + + // Generally, micromark copies line endings (`'\r'`, `'\n'`, `'\r\n'`) in the + // markdown document over to the compiled HTML. + // In some cases, such as `> a`, CommonMark requires that extra line endings + // are added: `<blockquote>\n<p>a</p>\n</blockquote>`. + // This variable hold the default line ending when given (or `undefined`), + // and in the latter case will be updated to the first found line ending if + // there is one. + var lineEndingStyle = settings.defaultLineEnding + + // Return the function that handles a slice of events. + return compile + + // Deal w/ a slice of events. + // Return either the empty string if there’s nothing of note to return, or the + // result when done. + function compile(events) { + // As definitions can come after references, we need to figure out the media + // (urls and titles) defined by them before handling the references. + // So, we do sort of what HTML does: put metadata at the start (in head), and + // then put content after (`body`). + var head = [] + var body = [] + var index + var start + var listStack + var handler + var result + + index = -1 + start = 0 + listStack = [] + + while (++index < events.length) { + // Figure out the line ending style used in the document. + if ( + !lineEndingStyle && + (events[index][1].type === types.lineEnding || + events[index][1].type === types.lineEndingBlank) + ) { + lineEndingStyle = events[index][2].sliceSerialize(events[index][1]) + } + + // Preprocess lists to infer whether the list is loose or not. + if ( + events[index][1].type === types.listOrdered || + events[index][1].type === types.listUnordered + ) { + if (events[index][0] === 'enter') { + listStack.push(index) + } else { + prepareList(events.slice(listStack.pop(), index)) + } + } + + // Move definitions to the front. + if (events[index][1].type === types.definition) { + if (events[index][0] === 'enter') { + body = chunkedPush(body, events.slice(start, index)) + start = index + } else { + head = chunkedPush(head, events.slice(start, index + 1)) + start = index + 1 + } + } + } + + head = chunkedPush(head, body) + head = chunkedPush(head, events.slice(start)) + result = head + index = -1 + + // Handle the start of the document, if defined. + if (handlers.enter.null) { + handlers.enter.null.call(context) + } + + // Handle all events. + while (++index < events.length) { + handler = handlers[result[index][0]] + + if (own.call(handler, result[index][1].type)) { + handler[result[index][1].type].call( + assign({sliceSerialize: result[index][2].sliceSerialize}, context), + result[index][1] + ) + } + } + + // Handle the end of the document, if defined. + if (handlers.exit.null) { + handlers.exit.null.call(context) + } + + return buffers[0].join('') + } + + // Figure out whether lists are loose or not. + function prepareList(slice) { + var length = slice.length - 1 // Skip close. + var index = 0 // Skip open. 
+ var containerBalance = 0 + var loose + var atMarker + var event + + while (++index < length) { + event = slice[index] + + if (event[1]._container) { + atMarker = undefined + + if (event[0] === 'enter') { + containerBalance++ + } else { + containerBalance-- + } + } else if (event[1].type === types.listItemPrefix) { + if (event[0] === 'exit') { + atMarker = true + } + } else if (event[1].type === types.linePrefix) { + // Ignore + } else if (event[1].type === types.lineEndingBlank) { + if (event[0] === 'enter' && !containerBalance) { + if (atMarker) { + atMarker = undefined + } else { + loose = true + } + } + } else { + atMarker = undefined + } + } + + slice[0][1]._loose = loose + } + + // Set data into the key-value store. + function setData(key, value) { + data[key] = value + } + + // Get data from the key-value store. + function getData(key) { + return data[key] + } + + // Capture some of the output data. + function buffer() { + buffers.push([]) + } + + // Stop capturing and access the output data. + function resume() { + return buffers.pop().join('') + } + + // Output (parts of) HTML tags. + function tag(value) { + if (!tags) return + setData('lastWasTag', true) + buffers[buffers.length - 1].push(value) + } + + // Output raw data. + function raw(value) { + setData('lastWasTag') + buffers[buffers.length - 1].push(value) + } + + // Output an extra line ending. + function lineEnding() { + raw(lineEndingStyle || '\n') + } + + // Output an extra line ending if the previous value wasn’t EOF/EOL. + function lineEndingIfNeeded() { + var buffer = buffers[buffers.length - 1] + var slice = buffer[buffer.length - 1] + var previous = slice ? slice.charCodeAt(slice.length - 1) : codes.eof + + if ( + previous === codes.lf || + previous === codes.cr || + previous === codes.eof + ) { + return + } + + lineEnding() + } + + // Make a value safe for injection in HTML (except w/ `ignoreEncode`). + function encode(value) { + return getData('ignoreEncode') ? value : value.replace(/["&<>]/g, replace) + function replace(value) { + return '&' + characterReferences[value] + ';' + } + } + + // Make a value safe for injection as a URL. + // This does encode unsafe characters with percent-encoding, skipping already + // encoded sequences (`normalizeUri`). + // Further unsafe characters are encoded as character references (`encode`). + // Finally, if the URL includes an unknown protocol (such as a dangerous + // example, `javascript:`), the value is ignored. + function url(url, protocol) { + var value = encode(normalizeUri(url || '')) + var colon = value.indexOf(':') + var questionMark = value.indexOf('?') + var numberSign = value.indexOf('#') + var slash = value.indexOf('/') + + if ( + settings.allowDangerousProtocol || + // If there is no protocol, it’s relative. + colon < 0 || + // If the first colon is after a `?`, `#`, or `/`, it’s not a protocol. + (slash > -1 && colon > slash) || + (questionMark > -1 && colon > questionMark) || + (numberSign > -1 && colon > numberSign) || + // It is a protocol, it should be allowed. + protocol.test(value.slice(0, colon)) + ) { + return value + } + + return '' + } + + // + // Handlers. 
+ // + + function onenterlistordered(token) { + tightStack.push(!token._loose) + lineEndingIfNeeded() + tag('<ol') + setData('expectFirstItem', true) + } + + function onenterlistunordered(token) { + tightStack.push(!token._loose) + lineEndingIfNeeded() + tag('<ul') + setData('expectFirstItem', true) + } + + function onenterlistitemvalue(token) { + var value + + if (getData('expectFirstItem')) { + value = parseInt(this.sliceSerialize(token), constants.numericBaseDecimal) + + if (value !== 1) { + tag(' start="' + encode(String(value)) + '"') + } + } + } + + function onenterlistitemmarker() { + if (getData('expectFirstItem')) { + tag('>') + } else { + onexitlistitem() + } + + lineEndingIfNeeded() + tag('<li>') + setData('expectFirstItem') + // “Hack” to prevent a line ending from showing up if the item is empty. + setData('lastWasTag') + } + + function onexitlistordered() { + onexitlistitem() + tightStack.pop() + lineEnding() + tag('</ol>') + } + + function onexitlistunordered() { + onexitlistitem() + tightStack.pop() + lineEnding() + tag('</ul>') + } + + function onexitlistitem() { + if (getData('lastWasTag') && !getData('slurpAllLineEndings')) { + lineEndingIfNeeded() + } + + tag('</li>') + setData('slurpAllLineEndings') + } + + function onenterblockquote() { + tightStack.push(false) + lineEndingIfNeeded() + tag('<blockquote>') + } + + function onexitblockquote() { + tightStack.pop() + lineEndingIfNeeded() + tag('</blockquote>') + setData('slurpAllLineEndings') + } + + function onenterparagraph() { + if (!tightStack[tightStack.length - 1]) { + lineEndingIfNeeded() + tag('<p>') + } + + setData('slurpAllLineEndings') + } + + function onexitparagraph() { + if (tightStack[tightStack.length - 1]) { + setData('slurpAllLineEndings', true) + } else { + tag('</p>') + } + } + + function onentercodefenced() { + lineEndingIfNeeded() + tag('<pre><code') + setData('fencesCount', 0) + } + + function onexitcodefencedfenceinfo() { + var value = resume() + tag(' class="language-' + value + '"') + } + + function onexitcodefencedfence() { + if (!getData('fencesCount')) { + tag('>') + setData('fencedCodeInside', true) + setData('slurpOneLineEnding', true) + } + + setData('fencesCount', getData('fencesCount') + 1) + } + + function onentercodeindented() { + lineEndingIfNeeded() + tag('<pre><code>') + } + + function onexitflowcode() { + // Send an extra line feed if we saw data. + if (getData('flowCodeSeenData')) lineEndingIfNeeded() + tag('</code></pre>') + if (getData('fencesCount') < 2) lineEndingIfNeeded() + setData('flowCodeSeenData') + setData('fencesCount') + setData('slurpOneLineEnding') + } + + function onenterimage() { + mediaStack.push({image: true}) + tags = undefined // Disallow tags. + } + + function onenterlink() { + mediaStack.push({}) + } + + function onexitlabeltext(token) { + mediaStack[mediaStack.length - 1].labelId = this.sliceSerialize(token) + } + + function onexitlabel() { + mediaStack[mediaStack.length - 1].label = resume() + } + + function onexitreferencestring(token) { + mediaStack[mediaStack.length - 1].referenceId = this.sliceSerialize(token) + } + + function onenterresource() { + buffer() // We can have line endings in the resource, ignore them. + mediaStack[mediaStack.length - 1].destination = '' + } + + function onenterresourcedestinationstring() { + buffer() + // Ignore encoding the result, as we’ll first percent encode the url and + // encode manually after. 
+ setData('ignoreEncode', true) + } + + function onexitresourcedestinationstring() { + mediaStack[mediaStack.length - 1].destination = resume() + setData('ignoreEncode') + } + + function onexitresourcetitlestring() { + mediaStack[mediaStack.length - 1].title = resume() + } + + function onexitmedia() { + var index = mediaStack.length - 1 // Skip current. + var media = mediaStack[index] + var context = + media.destination === undefined + ? definitions[normalizeIdentifier(media.referenceId || media.labelId)] + : media + + tags = true + + while (index--) { + if (mediaStack[index].image) { + tags = undefined + break + } + } + + if (media.image) { + tag('<img src="' + url(context.destination, protocolSrc) + '" alt="') + raw(media.label) + tag('"') + } else { + tag('<a href="' + url(context.destination, protocolHref) + '"') + } + + tag(context.title ? ' title="' + context.title + '"' : '') + + if (media.image) { + tag(' />') + } else { + tag('>') + raw(media.label) + tag('</a>') + } + + mediaStack.pop() + } + + function onenterdefinition() { + buffer() + mediaStack.push({}) + } + + function onexitdefinitionlabelstring(token) { + // Discard label, use the source content instead. + resume() + mediaStack[mediaStack.length - 1].labelId = this.sliceSerialize(token) + } + + function onenterdefinitiondestinationstring() { + buffer() + setData('ignoreEncode', true) + } + + function onexitdefinitiondestinationstring() { + mediaStack[mediaStack.length - 1].destination = resume() + setData('ignoreEncode') + } + + function onexitdefinitiontitlestring() { + mediaStack[mediaStack.length - 1].title = resume() + } + + function onexitdefinition() { + var id = normalizeIdentifier(mediaStack[mediaStack.length - 1].labelId) + + resume() + + if (!own.call(definitions, id)) { + definitions[id] = mediaStack[mediaStack.length - 1] + } + + mediaStack.pop() + } + + function onentercontent() { + setData('slurpAllLineEndings', true) + } + + function onexitatxheadingsequence(token) { + // Exit for further sequences. + if (getData('headingRank')) return + setData('headingRank', this.sliceSerialize(token).length) + lineEndingIfNeeded() + tag('<h' + getData('headingRank') + '>') + } + + function onentersetextheading() { + buffer() + setData('slurpAllLineEndings') + } + + function onexitsetextheadingtext() { + setData('slurpAllLineEndings', true) + } + + function onexitatxheading() { + tag('</h' + getData('headingRank') + '>') + setData('headingRank') + } + + function onexitsetextheadinglinesequence(token) { + setData( + 'headingRank', + this.sliceSerialize(token).charCodeAt(0) === codes.equalsTo ? 
1 : 2 + ) + } + + function onexitsetextheading() { + var value = resume() + lineEndingIfNeeded() + tag('<h' + getData('headingRank') + '>') + raw(value) + tag('</h' + getData('headingRank') + '>') + setData('slurpAllLineEndings') + setData('headingRank') + } + + function onexitdata(token) { + raw(encode(this.sliceSerialize(token))) + } + + function onexitlineending(token) { + if (getData('slurpAllLineEndings')) { + return + } + + if (getData('slurpOneLineEnding')) { + setData('slurpOneLineEnding') + return + } + + if (getData('inCodeText')) { + raw(' ') + return + } + + raw(encode(this.sliceSerialize(token))) + } + + function onexitcodeflowvalue(token) { + raw(encode(this.sliceSerialize(token))) + setData('flowCodeSeenData', true) + } + + function onexithardbreak() { + tag('<br />') + } + + function onenterhtmlflow() { + lineEndingIfNeeded() + onenterhtml() + } + + function onexithtml() { + setData('ignoreEncode') + } + + function onenterhtml() { + if (settings.allowDangerousHtml) { + setData('ignoreEncode', true) + } + } + + function onenteremphasis() { + tag('<em>') + } + + function onenterstrong() { + tag('<strong>') + } + + function onentercodetext() { + setData('inCodeText', true) + tag('<code>') + } + + function onexitcodetext() { + setData('inCodeText') + tag('</code>') + } + + function onexitemphasis() { + tag('</em>') + } + + function onexitstrong() { + tag('</strong>') + } + + function onexitthematicbreak() { + lineEndingIfNeeded() + tag('<hr />') + } + + function onexitcharacterreferencemarker(token) { + setData('characterReferenceType', token.type) + } + + function onexitcharacterreferencevalue(token) { + var value = this.sliceSerialize(token) + + value = getData('characterReferenceType') + ? safeFromInt( + value, + getData('characterReferenceType') === + types.characterReferenceMarkerNumeric + ? constants.numericBaseDecimal + : constants.numericBaseHexadecimal + ) + : decodeEntity(value) + + raw(encode(value)) + setData('characterReferenceType') + } + + function onexitautolinkprotocol(token) { + var uri = this.sliceSerialize(token) + tag('<a href="' + url(uri, protocolHref) + '">') + raw(encode(uri)) + tag('</a>') + } + + function onexitautolinkemail(token) { + var uri = this.sliceSerialize(token) + tag('<a href="' + url('mailto:' + uri, protocolHref) + '">') + raw(encode(uri)) + tag('</a>') + } +} diff --git a/node_modules/micromark/lib/constant/assign.js b/node_modules/micromark/lib/constant/assign.js new file mode 100644 index 00000000..b6ae48a0 --- /dev/null +++ b/node_modules/micromark/lib/constant/assign.js @@ -0,0 +1,5 @@ +'use strict' + +var assign = Object.assign + +module.exports = assign diff --git a/node_modules/micromark/lib/constant/assign.mjs b/node_modules/micromark/lib/constant/assign.mjs new file mode 100644 index 00000000..8cfbca32 --- /dev/null +++ b/node_modules/micromark/lib/constant/assign.mjs @@ -0,0 +1 @@ +export default Object.assign diff --git a/node_modules/micromark/lib/constant/constants.d.ts b/node_modules/micromark/lib/constant/constants.d.ts new file mode 100644 index 00000000..173ba4ac --- /dev/null +++ b/node_modules/micromark/lib/constant/constants.d.ts @@ -0,0 +1,65 @@ +// This module is generated by `script/`. 
+ +export type Constant = + | 1 + | 2 + | 6 + | 63 + | 32 + | 'CDATA[' + | 7 + | 31 + | 3 + | 'flow' + | 'content' + | 'string' + | 'text' + | 4 + | 5 + | 8 + | 999 + | 10 + | 16 + | 10000 + +// @for-script: REMOVE_ALL_THING_BELOW + +export interface Constants { + attentionSideBefore: 1 + attentionSideAfter: 2 + atxHeadingOpeningFenceSizeMax: 6 + autolinkDomainSizeMax: 63 + autolinkSchemeSizeMax: 32 + cdataOpeningString: 'CDATA[' + characterGroupWhitespace: 1 + characterGroupPunctuation: 2 + characterReferenceDecimalSizeMax: 7 + characterReferenceHexadecimalSizeMax: 6 + characterReferenceNamedSizeMax: 31 + codeFencedSequenceSizeMin: 3 + contentTypeFlow: 'flow' + contentTypeContent: 'content' + contentTypeString: 'string' + contentTypeText: 'text' + hardBreakPrefixSizeMin: 2 + htmlRaw: 1 + htmlComment: 2 + htmlInstruction: 3 + htmlDeclaration: 4 + htmlCdata: 5 + htmlBasic: 6 + htmlComplete: 7 + htmlRawSizeMax: 8 + linkResourceDestinationBalanceMax: 3 + linkReferenceSizeMax: 999 + listItemValueSizeMax: 10 + numericBaseDecimal: 10 + numericBaseHexadecimal: 16 + tabSize: 4 + thematicBreakMarkerCountMin: 3 + v8MaxSafeChunkSize: 10000 +} + +declare const value: Constants + +export default value diff --git a/node_modules/micromark/lib/constant/constants.js b/node_modules/micromark/lib/constant/constants.js new file mode 100644 index 00000000..cd75c071 --- /dev/null +++ b/node_modules/micromark/lib/constant/constants.js @@ -0,0 +1,45 @@ +'use strict' + +// This module is compiled away! +// +// Parsing markdown comes with a couple of constants, such as minimum or maximum +// sizes of certain sequences. +// Additionally, there are a couple symbols used inside micromark. +// These are all defined here, but compiled away by scripts. +var constants = { + attentionSideBefore: 1, // Symbol to mark an attention sequence as before content: `*a` + attentionSideAfter: 2, // Symbol to mark an attention sequence as after content: `a*` + atxHeadingOpeningFenceSizeMax: 6, // 6 number signs is fine, 7 isn’t. + autolinkDomainSizeMax: 63, // 63 characters is fine, 64 is too many. + autolinkSchemeSizeMax: 32, // 32 characters is fine, 33 is too many. + cdataOpeningString: 'CDATA[', // And preceded by `<![`. + characterGroupWhitespace: 1, // Symbol used to indicate a character is whitespace + characterGroupPunctuation: 2, // Symbol used to indicate a character is whitespace + characterReferenceDecimalSizeMax: 7, // `&#9999999;`. + characterReferenceHexadecimalSizeMax: 6, // `&#xff9999;`. + characterReferenceNamedSizeMax: 31, // `&CounterClockwiseContourIntegral;`. + codeFencedSequenceSizeMin: 3, // At least 3 ticks or tildes are needed. + contentTypeFlow: 'flow', + contentTypeContent: 'content', + contentTypeString: 'string', + contentTypeText: 'text', + hardBreakPrefixSizeMin: 2, // At least 2 trailing spaces are needed. + htmlRaw: 1, // Symbol for `<script>` + htmlComment: 2, // Symbol for `<!---->` + htmlInstruction: 3, // Symbol for `<?php?>` + htmlDeclaration: 4, // Symbol for `<!doctype>` + htmlCdata: 5, // Symbol for `<![CDATA[]]>` + htmlBasic: 6, // Symbol for `<div` + htmlComplete: 7, // Symbol for `<x>` + htmlRawSizeMax: 8, // Length of `textarea`. 
+ linkResourceDestinationBalanceMax: 3, // See: <https://spec.commonmark.org/0.29/#link-destination> + linkReferenceSizeMax: 999, // See: <https://spec.commonmark.org/0.29/#link-label> + listItemValueSizeMax: 10, // See: <https://spec.commonmark.org/0.29/#ordered-list-marker> + numericBaseDecimal: 10, + numericBaseHexadecimal: 0x10, + tabSize: 4, // Tabs have a hard-coded size of 4, per CommonMark. + thematicBreakMarkerCountMin: 3, // At least 3 asterisks, dashes, or underscores are needed. + v8MaxSafeChunkSize: 10000 // V8 (and potentially others) have problems injecting giant arrays into other arrays, hence we operate in chunks. +} + +module.exports = constants diff --git a/node_modules/micromark/lib/constant/constants.mjs b/node_modules/micromark/lib/constant/constants.mjs new file mode 100644 index 00000000..65db9017 --- /dev/null +++ b/node_modules/micromark/lib/constant/constants.mjs @@ -0,0 +1,41 @@ +// This module is compiled away! +// +// Parsing markdown comes with a couple of constants, such as minimum or maximum +// sizes of certain sequences. +// Additionally, there are a couple symbols used inside micromark. +// These are all defined here, but compiled away by scripts. +export default { + attentionSideBefore: 1, // Symbol to mark an attention sequence as before content: `*a` + attentionSideAfter: 2, // Symbol to mark an attention sequence as after content: `a*` + atxHeadingOpeningFenceSizeMax: 6, // 6 number signs is fine, 7 isn’t. + autolinkDomainSizeMax: 63, // 63 characters is fine, 64 is too many. + autolinkSchemeSizeMax: 32, // 32 characters is fine, 33 is too many. + cdataOpeningString: 'CDATA[', // And preceded by `<![`. + characterGroupWhitespace: 1, // Symbol used to indicate a character is whitespace + characterGroupPunctuation: 2, // Symbol used to indicate a character is whitespace + characterReferenceDecimalSizeMax: 7, // `&#9999999;`. + characterReferenceHexadecimalSizeMax: 6, // `&#xff9999;`. + characterReferenceNamedSizeMax: 31, // `&CounterClockwiseContourIntegral;`. + codeFencedSequenceSizeMin: 3, // At least 3 ticks or tildes are needed. + contentTypeFlow: 'flow', + contentTypeContent: 'content', + contentTypeString: 'string', + contentTypeText: 'text', + hardBreakPrefixSizeMin: 2, // At least 2 trailing spaces are needed. + htmlRaw: 1, // Symbol for `<script>` + htmlComment: 2, // Symbol for `<!---->` + htmlInstruction: 3, // Symbol for `<?php?>` + htmlDeclaration: 4, // Symbol for `<!doctype>` + htmlCdata: 5, // Symbol for `<![CDATA[]]>` + htmlBasic: 6, // Symbol for `<div` + htmlComplete: 7, // Symbol for `<x>` + htmlRawSizeMax: 8, // Length of `textarea`. + linkResourceDestinationBalanceMax: 3, // See: <https://spec.commonmark.org/0.29/#link-destination> + linkReferenceSizeMax: 999, // See: <https://spec.commonmark.org/0.29/#link-label> + listItemValueSizeMax: 10, // See: <https://spec.commonmark.org/0.29/#ordered-list-marker> + numericBaseDecimal: 10, + numericBaseHexadecimal: 0x10, + tabSize: 4, // Tabs have a hard-coded size of 4, per CommonMark. + thematicBreakMarkerCountMin: 3, // At least 3 asterisks, dashes, or underscores are needed. + v8MaxSafeChunkSize: 10000 // V8 (and potentially others) have problems injecting giant arrays into other arrays, hence we operate in chunks. 
+} diff --git a/node_modules/micromark/lib/constant/from-char-code.js b/node_modules/micromark/lib/constant/from-char-code.js new file mode 100644 index 00000000..232eac74 --- /dev/null +++ b/node_modules/micromark/lib/constant/from-char-code.js @@ -0,0 +1,5 @@ +'use strict' + +var fromCharCode = String.fromCharCode + +module.exports = fromCharCode diff --git a/node_modules/micromark/lib/constant/from-char-code.mjs b/node_modules/micromark/lib/constant/from-char-code.mjs new file mode 100644 index 00000000..0476a763 --- /dev/null +++ b/node_modules/micromark/lib/constant/from-char-code.mjs @@ -0,0 +1 @@ +export default String.fromCharCode diff --git a/node_modules/micromark/lib/constant/has-own-property.js b/node_modules/micromark/lib/constant/has-own-property.js new file mode 100644 index 00000000..aa9197cd --- /dev/null +++ b/node_modules/micromark/lib/constant/has-own-property.js @@ -0,0 +1,5 @@ +'use strict' + +var own = {}.hasOwnProperty + +module.exports = own diff --git a/node_modules/micromark/lib/constant/has-own-property.mjs b/node_modules/micromark/lib/constant/has-own-property.mjs new file mode 100644 index 00000000..1da16d6d --- /dev/null +++ b/node_modules/micromark/lib/constant/has-own-property.mjs @@ -0,0 +1 @@ +export default {}.hasOwnProperty diff --git a/node_modules/micromark/lib/constant/html-block-names.js b/node_modules/micromark/lib/constant/html-block-names.js new file mode 100644 index 00000000..9b5ada73 --- /dev/null +++ b/node_modules/micromark/lib/constant/html-block-names.js @@ -0,0 +1,69 @@ +'use strict' + +// This module is copied from <https://spec.commonmark.org/0.29/#html-blocks>. +var basics = [ + 'address', + 'article', + 'aside', + 'base', + 'basefont', + 'blockquote', + 'body', + 'caption', + 'center', + 'col', + 'colgroup', + 'dd', + 'details', + 'dialog', + 'dir', + 'div', + 'dl', + 'dt', + 'fieldset', + 'figcaption', + 'figure', + 'footer', + 'form', + 'frame', + 'frameset', + 'h1', + 'h2', + 'h3', + 'h4', + 'h5', + 'h6', + 'head', + 'header', + 'hr', + 'html', + 'iframe', + 'legend', + 'li', + 'link', + 'main', + 'menu', + 'menuitem', + 'nav', + 'noframes', + 'ol', + 'optgroup', + 'option', + 'p', + 'param', + 'section', + 'source', + 'summary', + 'table', + 'tbody', + 'td', + 'tfoot', + 'th', + 'thead', + 'title', + 'tr', + 'track', + 'ul' +] + +module.exports = basics diff --git a/node_modules/micromark/lib/constant/html-block-names.mjs b/node_modules/micromark/lib/constant/html-block-names.mjs new file mode 100644 index 00000000..8d974d8f --- /dev/null +++ b/node_modules/micromark/lib/constant/html-block-names.mjs @@ -0,0 +1,65 @@ +// This module is copied from <https://spec.commonmark.org/0.29/#html-blocks>. 
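A hedged note on what this allowlist is for, assuming the vendored micromark ~2.x behavior: a line whose opening tag name appears in the list starts a "basic" HTML flow block (the `htmlBasic` kind in `constants`), which continues until a blank line and whose contents are not parsed as markdown.

// Hedged sketch: `div` is in the list, so the block is compiled as HTML flow
// rather than as a paragraph with inline markdown.
var micromark = require('micromark')

console.log(micromark('<div>\n*not emphasis*\n</div>', {allowDangerousHtml: true}))
// => '<div>\n*not emphasis*\n</div>'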
+export default [ + 'address', + 'article', + 'aside', + 'base', + 'basefont', + 'blockquote', + 'body', + 'caption', + 'center', + 'col', + 'colgroup', + 'dd', + 'details', + 'dialog', + 'dir', + 'div', + 'dl', + 'dt', + 'fieldset', + 'figcaption', + 'figure', + 'footer', + 'form', + 'frame', + 'frameset', + 'h1', + 'h2', + 'h3', + 'h4', + 'h5', + 'h6', + 'head', + 'header', + 'hr', + 'html', + 'iframe', + 'legend', + 'li', + 'link', + 'main', + 'menu', + 'menuitem', + 'nav', + 'noframes', + 'ol', + 'optgroup', + 'option', + 'p', + 'param', + 'section', + 'source', + 'summary', + 'table', + 'tbody', + 'td', + 'tfoot', + 'th', + 'thead', + 'title', + 'tr', + 'track', + 'ul' +] diff --git a/node_modules/micromark/lib/constant/html-raw-names.js b/node_modules/micromark/lib/constant/html-raw-names.js new file mode 100644 index 00000000..c22a3954 --- /dev/null +++ b/node_modules/micromark/lib/constant/html-raw-names.js @@ -0,0 +1,6 @@ +'use strict' + +// This module is copied from <https://spec.commonmark.org/0.29/#html-blocks>. +var raws = ['pre', 'script', 'style', 'textarea'] + +module.exports = raws diff --git a/node_modules/micromark/lib/constant/html-raw-names.mjs b/node_modules/micromark/lib/constant/html-raw-names.mjs new file mode 100644 index 00000000..2da5febe --- /dev/null +++ b/node_modules/micromark/lib/constant/html-raw-names.mjs @@ -0,0 +1,2 @@ +// This module is copied from <https://spec.commonmark.org/0.29/#html-blocks>. +export default ['pre', 'script', 'style', 'textarea'] diff --git a/node_modules/micromark/lib/constant/splice.js b/node_modules/micromark/lib/constant/splice.js new file mode 100644 index 00000000..8917210a --- /dev/null +++ b/node_modules/micromark/lib/constant/splice.js @@ -0,0 +1,5 @@ +'use strict' + +var splice = [].splice + +module.exports = splice diff --git a/node_modules/micromark/lib/constant/splice.mjs b/node_modules/micromark/lib/constant/splice.mjs new file mode 100644 index 00000000..482404dd --- /dev/null +++ b/node_modules/micromark/lib/constant/splice.mjs @@ -0,0 +1 @@ +export default [].splice diff --git a/node_modules/micromark/lib/constant/types.d.ts b/node_modules/micromark/lib/constant/types.d.ts new file mode 100644 index 00000000..da294cab --- /dev/null +++ b/node_modules/micromark/lib/constant/types.d.ts @@ -0,0 +1,114 @@ +// This module is generated by `script/`. 
+ +export type Type = string + +// @for-script: REMOVE_ALL_THING_BELOW + +export interface Types { + data: 'data' + whitespace: 'whitespace' + lineEnding: 'lineEnding' + lineEndingBlank: 'lineEndingBlank' + linePrefix: 'linePrefix' + lineSuffix: 'lineSuffix' + atxHeading: 'atxHeading' + atxHeadingSequence: 'atxHeadingSequence' + atxHeadingText: 'atxHeadingText' + autolink: 'autolink' + autolinkEmail: 'autolinkEmail' + autolinkMarker: 'autolinkMarker' + autolinkProtocol: 'autolinkProtocol' + characterEscape: 'characterEscape' + characterEscapeValue: 'characterEscapeValue' + characterReference: 'characterReference' + characterReferenceMarker: 'characterReferenceMarker' + characterReferenceMarkerNumeric: 'characterReferenceMarkerNumeric' + characterReferenceMarkerHexadecimal: 'characterReferenceMarkerHexadecimal' + characterReferenceValue: 'characterReferenceValue' + codeFenced: 'codeFenced' + codeFencedFence: 'codeFencedFence' + codeFencedFenceSequence: 'codeFencedFenceSequence' + codeFencedFenceInfo: 'codeFencedFenceInfo' + codeFencedFenceMeta: 'codeFencedFenceMeta' + codeFlowValue: 'codeFlowValue' + codeIndented: 'codeIndented' + codeText: 'codeText' + codeTextData: 'codeTextData' + codeTextPadding: 'codeTextPadding' + codeTextSequence: 'codeTextSequence' + content: 'content' + definition: 'definition' + definitionDestination: 'definitionDestination' + definitionDestinationLiteral: 'definitionDestinationLiteral' + definitionDestinationLiteralMarker: 'definitionDestinationLiteralMarker' + definitionDestinationRaw: 'definitionDestinationRaw' + definitionDestinationString: 'definitionDestinationString' + definitionLabel: 'definitionLabel' + definitionLabelMarker: 'definitionLabelMarker' + definitionLabelString: 'definitionLabelString' + definitionMarker: 'definitionMarker' + definitionTitle: 'definitionTitle' + definitionTitleMarker: 'definitionTitleMarker' + definitionTitleString: 'definitionTitleString' + emphasis: 'emphasis' + emphasisSequence: 'emphasisSequence' + emphasisText: 'emphasisText' + escapeMarker: 'escapeMarker' + hardBreakEscape: 'hardBreakEscape' + hardBreakTrailing: 'hardBreakTrailing' + htmlFlow: 'htmlFlow' + htmlFlowData: 'htmlFlowData' + htmlText: 'htmlText' + htmlTextData: 'htmlTextData' + image: 'image' + label: 'label' + labelText: 'labelText' + labelLink: 'labelLink' + labelImage: 'labelImage' + labelMarker: 'labelMarker' + labelImageMarker: 'labelImageMarker' + labelEnd: 'labelEnd' + link: 'link' + paragraph: 'paragraph' + reference: 'reference' + referenceMarker: 'referenceMarker' + referenceString: 'referenceString' + resource: 'resource' + resourceDestination: 'resourceDestination' + resourceDestinationLiteral: 'resourceDestinationLiteral' + resourceDestinationLiteralMarker: 'resourceDestinationLiteralMarker' + resourceDestinationRaw: 'resourceDestinationRaw' + resourceDestinationString: 'resourceDestinationString' + resourceMarker: 'resourceMarker' + resourceTitle: 'resourceTitle' + resourceTitleMarker: 'resourceTitleMarker' + resourceTitleString: 'resourceTitleString' + setextHeading: 'setextHeading' + setextHeadingText: 'setextHeadingText' + setextHeadingLine: 'setextHeadingLine' + setextHeadingLineSequence: 'setextHeadingLineSequence' + strong: 'strong' + strongSequence: 'strongSequence' + strongText: 'strongText' + thematicBreak: 'thematicBreak' + thematicBreakSequence: 'thematicBreakSequence' + blockQuote: 'blockQuote' + blockQuotePrefix: 'blockQuotePrefix' + blockQuoteMarker: 'blockQuoteMarker' + blockQuotePrefixWhitespace: 'blockQuotePrefixWhitespace' + 
listOrdered: 'listOrdered' + listUnordered: 'listUnordered' + listItemIndent: 'listItemIndent' + listItemMarker: 'listItemMarker' + listItemPrefix: 'listItemPrefix' + listItemPrefixWhitespace: 'listItemPrefixWhitespace' + listItemValue: 'listItemValue' + chunkContent: 'chunkContent' + chunkFlow: 'chunkFlow' + chunkText: 'chunkText' + chunkString: 'chunkString' +} + +declare const value: Types + +export default value diff --git a/node_modules/micromark/lib/constant/types.js b/node_modules/micromark/lib/constant/types.js new file mode 100644 index 00000000..11d9e4ed --- /dev/null +++ b/node_modules/micromark/lib/constant/types.js @@ -0,0 +1,452 @@ +'use strict' + +// This module is compiled away! +// +// Here is the list of all types of tokens exposed by micromark, with a short +// explanation of what they include and where they are found. +// In picking names, generally, the rule is to be as explicit as possible +// instead of reusing names. +// For example, there is a `definitionDestination` and a `resourceDestination`, +// instead of one shared name. + +var types = { + // Generic type for data, such as in a title, a destination, etc. + data: 'data', + + // Generic type for syntactic whitespace (tabs, virtual spaces, spaces). + // Such as, between a fenced code fence and an info string. + whitespace: 'whitespace', + + // Generic type for line endings (line feed, carriage return, carriage return + + // line feed). + lineEnding: 'lineEnding', + + // A line ending, but ending a blank line. + lineEndingBlank: 'lineEndingBlank', + + // Generic type for whitespace (tabs, virtual spaces, spaces) at the start of a + // line. + linePrefix: 'linePrefix', + + // Generic type for whitespace (tabs, virtual spaces, spaces) at the end of a + // line. + lineSuffix: 'lineSuffix', + + // Whole ATX heading: + // + // ```markdown + // # + // ## Alpha + // ### Bravo ### + // ``` + // + // Includes `atxHeadingSequence`, `whitespace`, `atxHeadingText`. + atxHeading: 'atxHeading', + + // Sequence of number signs in an ATX heading (`###`). + atxHeadingSequence: 'atxHeadingSequence', + + // Content in an ATX heading (`alpha`). + // Includes text. + atxHeadingText: 'atxHeadingText', + + // Whole autolink (`<https://example.com>` or `<admin@example.com>`) + // Includes `autolinkMarker` and `autolinkProtocol` or `autolinkEmail`. + autolink: 'autolink', + + // Email autolink w/o markers (`admin@example.com`) + autolinkEmail: 'autolinkEmail', + + // Marker around an `autolinkProtocol` or `autolinkEmail` (`<` or `>`). + autolinkMarker: 'autolinkMarker', + + // Protocol autolink w/o markers (`https://example.com`) + autolinkProtocol: 'autolinkProtocol', + + // A whole character escape (`\-`). + // Includes `escapeMarker` and `characterEscapeValue`. + characterEscape: 'characterEscape', + + // The escaped character (`-`). + characterEscapeValue: 'characterEscapeValue', + + // A whole character reference (`&amp;`, `&#8800;`, or `&#x1D306;`). + // Includes `characterReferenceMarker`, an optional + // `characterReferenceMarkerNumeric`, in which case an optional + // `characterReferenceMarkerHexadecimal`, and a `characterReferenceValue`. + characterReference: 'characterReference', + + // The start or end marker (`&` or `;`). + characterReferenceMarker: 'characterReferenceMarker', + + // Mark reference as numeric (`#`). + characterReferenceMarkerNumeric: 'characterReferenceMarkerNumeric', + + // Mark reference as numeric (`x` or `X`). 
+ characterReferenceMarkerHexadecimal: 'characterReferenceMarkerHexadecimal', + + // Value of character reference w/o markers (`amp`, `8800`, or `1D306`). + characterReferenceValue: 'characterReferenceValue', + + // Whole fenced code: + // + // ````markdown + // ```js + // alert(1) + // ``` + // ```` + codeFenced: 'codeFenced', + + // A fenced code fence, including whitespace, sequence, info, and meta + // (` ```js `). + codeFencedFence: 'codeFencedFence', + + // Sequence of grave accent or tilde characters (` ``` `) in a fence. + codeFencedFenceSequence: 'codeFencedFenceSequence', + + // Info word (`js`) in a fence. + // Includes string. + codeFencedFenceInfo: 'codeFencedFenceInfo', + + // Meta words (`highlight="1"`) in a fence. + // Includes string. + codeFencedFenceMeta: 'codeFencedFenceMeta', + + // A line of code. + codeFlowValue: 'codeFlowValue', + + // Whole indented code: + // + // ```markdown + // alert(1) + // ``` + // + // Includes `lineEnding`, `linePrefix`, and `codeFlowValue`. + codeIndented: 'codeIndented', + + // A text code (``` `alpha` ```). + // Includes `codeTextSequence`, `codeTextData`, `lineEnding`, and can include + // `codeTextPadding`. + codeText: 'codeText', + + codeTextData: 'codeTextData', + + // A space or line ending right after or before a tick. + codeTextPadding: 'codeTextPadding', + + // A text code fence (` `` `). + codeTextSequence: 'codeTextSequence', + + // Whole content: + // + // ```markdown + // [a]: b + // c + // = + // d + // ``` + // + // Includes `paragraph` and `definition`. + content: 'content', + // Whole definition: + // + // ```markdown + // [micromark]: https://github.com/micromark/micromark + // ``` + // + // Includes `definitionLabel`, `definitionMarker`, `whitespace`, + // `definitionDestination`, and optionally `lineEnding` and `definitionTitle`. + definition: 'definition', + + // Destination of a definition (`https://github.com/micromark/micromark` or + // `<https://github.com/micromark/micromark>`). + // Includes `definitionDestinationLiteral` or `definitionDestinationRaw`. + definitionDestination: 'definitionDestination', + + // Enclosed destination of a definition + // (`<https://github.com/micromark/micromark>`). + // Includes `definitionDestinationLiteralMarker` and optionally + // `definitionDestinationString`. + definitionDestinationLiteral: 'definitionDestinationLiteral', + + // Markers of an enclosed definition destination (`<` or `>`). + definitionDestinationLiteralMarker: 'definitionDestinationLiteralMarker', + + // Unenclosed destination of a definition + // (`https://github.com/micromark/micromark`). + // Includes `definitionDestinationString`. + definitionDestinationRaw: 'definitionDestinationRaw', + + // Text in an destination (`https://github.com/micromark/micromark`). + // Includes string. + definitionDestinationString: 'definitionDestinationString', + + // Label of a definition (`[micromark]`). + // Includes `definitionLabelMarker` and `definitionLabelString`. + definitionLabel: 'definitionLabel', + + // Markers of a definition label (`[` or `]`). + definitionLabelMarker: 'definitionLabelMarker', + + // Value of a definition label (`micromark`). + // Includes string. + definitionLabelString: 'definitionLabelString', + + // Marker between a label and a destination (`:`). + definitionMarker: 'definitionMarker', + + // Title of a definition (`"x"`, `'y'`, or `(z)`). + // Includes `definitionTitleMarker` and optionally `definitionTitleString`. 
+ definitionTitle: 'definitionTitle', + + // Marker around a title of a definition (`"`, `'`, `(`, or `)`). + definitionTitleMarker: 'definitionTitleMarker', + + // Data without markers in a title (`z`). + // Includes string. + definitionTitleString: 'definitionTitleString', + + // Emphasis (`*alpha*`). + // Includes `emphasisSequence` and `emphasisText`. + emphasis: 'emphasis', + + // Sequence of emphasis markers (`*` or `_`). + emphasisSequence: 'emphasisSequence', + + // Emphasis text (`alpha`). + // Includes text. + emphasisText: 'emphasisText', + + // The character escape marker (`\`). + escapeMarker: 'escapeMarker', + + // A hard break created with a backslash (`\\n`). + // Includes `escapeMarker` (does not include the line ending) + hardBreakEscape: 'hardBreakEscape', + + // A hard break created with trailing spaces (` \n`). + // Does not include the line ending. + hardBreakTrailing: 'hardBreakTrailing', + + // Flow HTML: + // + // ```markdown + // <div + // ``` + // + // Inlcudes `lineEnding`, `htmlFlowData`. + htmlFlow: 'htmlFlow', + + htmlFlowData: 'htmlFlowData', + + // HTML in text (the tag in `a <i> b`). + // Includes `lineEnding`, `htmlTextData`. + htmlText: 'htmlText', + + htmlTextData: 'htmlTextData', + + // Whole image (`![alpha](bravo)`, `![alpha][bravo]`, `![alpha][]`, or + // `![alpha]`). + // Includes `label` and an optional `resource` or `reference`. + image: 'image', + + // Whole link label (`[*alpha*]`). + // Includes `labelLink` or `labelImage`, `labelText`, and `labelEnd`. + label: 'label', + + // Text in an label (`*alpha*`). + // Includes text. + labelText: 'labelText', + + // Start a link label (`[`). + // Includes a `labelMarker`. + labelLink: 'labelLink', + + // Start an image label (`![`). + // Includes `labelImageMarker` and `labelMarker`. + labelImage: 'labelImage', + + // Marker of a label (`[` or `]`). + labelMarker: 'labelMarker', + + // Marker to start an image (`!`). + labelImageMarker: 'labelImageMarker', + + // End a label (`]`). + // Includes `labelMarker`. + labelEnd: 'labelEnd', + + // Whole link (`[alpha](bravo)`, `[alpha][bravo]`, `[alpha][]`, or `[alpha]`). + // Includes `label` and an optional `resource` or `reference`. + link: 'link', + + // Whole paragraph: + // + // ```markdown + // alpha + // bravo. + // ``` + // + // Includes text. + paragraph: 'paragraph', + + // A reference (`[alpha]` or `[]`). + // Includes `referenceMarker` and an optional `referenceString`. + reference: 'reference', + + // A reference marker (`[` or `]`). + referenceMarker: 'referenceMarker', + + // Reference text (`alpha`). + // Includes string. + referenceString: 'referenceString', + + // A resource (`(https://example.com "alpha")`). + // Includes `resourceMarker`, an optional `resourceDestination` with an optional + // `whitespace` and `resourceTitle`. + resource: 'resource', + + // A resource destination (`https://example.com`). + // Includes `resourceDestinationLiteral` or `resourceDestinationRaw`. + resourceDestination: 'resourceDestination', + + // A literal resource destination (`<https://example.com>`). + // Includes `resourceDestinationLiteralMarker` and optionally + // `resourceDestinationString`. + resourceDestinationLiteral: 'resourceDestinationLiteral', + + // A resource destination marker (`<` or `>`). + resourceDestinationLiteralMarker: 'resourceDestinationLiteralMarker', + + // A raw resource destination (`https://example.com`). + // Includes `resourceDestinationString`. 
+ resourceDestinationRaw: 'resourceDestinationRaw', + + // Resource destination text (`https://example.com`). + // Includes string. + resourceDestinationString: 'resourceDestinationString', + + // A resource marker (`(` or `)`). + resourceMarker: 'resourceMarker', + + // A resource title (`"alpha"`, `'alpha'`, or `(alpha)`). + // Includes `resourceTitleMarker` and optionally `resourceTitleString`. + resourceTitle: 'resourceTitle', + + // A resource title marker (`"`, `'`, `(`, or `)`). + resourceTitleMarker: 'resourceTitleMarker', + + // Resource destination title (`alpha`). + // Includes string. + resourceTitleString: 'resourceTitleString', + + // Whole setext heading: + // + // ```markdown + // alpha + // bravo + // ===== + // ``` + // + // Includes `setextHeadingText`, `lineEnding`, `linePrefix`, and + // `setextHeadingLine`. + setextHeading: 'setextHeading', + + // Content in a setext heading (`alpha\nbravo`). + // Includes text. + setextHeadingText: 'setextHeadingText', + + // Underline in a setext heading, including whitespace suffix (`==`). + // Includes `setextHeadingLineSequence`. + setextHeadingLine: 'setextHeadingLine', + + // Sequence of equals or dash characters in underline in a setext heading (`-`). + setextHeadingLineSequence: 'setextHeadingLineSequence', + + // Strong (`**alpha**`). + // Includes `strongSequence` and `strongText`. + strong: 'strong', + + // Sequence of strong markers (`**` or `__`). + strongSequence: 'strongSequence', + + // Strong text (`alpha`). + // Includes text. + strongText: 'strongText', + + // Whole thematic break: + // + // ```markdown + // * * * + // ``` + // + // Includes `thematicBreakSequence` and `whitespace`. + thematicBreak: 'thematicBreak', + + // A sequence of one or more thematic break markers (`***`). + thematicBreakSequence: 'thematicBreakSequence', + + // Whole block quote: + // + // ```markdown + // > a + // > + // > b + // ``` + // + // Includes `blockQuotePrefix` and flow. + blockQuote: 'blockQuote', + // The `>` or `> ` of a block quote. + blockQuotePrefix: 'blockQuotePrefix', + // The `>` of a block quote prefix. + blockQuoteMarker: 'blockQuoteMarker', + // The optional ` ` of a block quote prefix. + blockQuotePrefixWhitespace: 'blockQuotePrefixWhitespace', + + // Whole unordered list: + // + // ```markdown + // - a + // b + // ``` + // + // Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further + // lines. + listOrdered: 'listOrdered', + + // Whole ordered list: + // + // ```markdown + // 1. a + // b + // ``` + // + // Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further + // lines. + listUnordered: 'listUnordered', + + // The indent of further list item lines. + listItemIndent: 'listItemIndent', + + // A marker, as in, `*`, `+`, `-`, `.`, or `)`. + listItemMarker: 'listItemMarker', + + // The thing that starts a list item, such as `1. `. + // Includes `listItemValue` if ordered, `listItemMarker`, and + // `listItemPrefixWhitespace` (unless followed by a line ending). + listItemPrefix: 'listItemPrefix', + + // The whitespace after a marker. + listItemPrefixWhitespace: 'listItemPrefixWhitespace', + + // The numerical value of an ordered item. 
+ listItemValue: 'listItemValue', + + // Internal types used for subtokenizers, compiled away + chunkContent: 'chunkContent', + chunkFlow: 'chunkFlow', + chunkText: 'chunkText', + chunkString: 'chunkString' +} + +module.exports = types diff --git a/node_modules/micromark/lib/constant/types.mjs b/node_modules/micromark/lib/constant/types.mjs new file mode 100644 index 00000000..d39d9790 --- /dev/null +++ b/node_modules/micromark/lib/constant/types.mjs @@ -0,0 +1,448 @@ +// This module is compiled away! +// +// Here is the list of all types of tokens exposed by micromark, with a short +// explanation of what they include and where they are found. +// In picking names, generally, the rule is to be as explicit as possible +// instead of reusing names. +// For example, there is a `definitionDestination` and a `resourceDestination`, +// instead of one shared name. + +export default { + // Generic type for data, such as in a title, a destination, etc. + data: 'data', + + // Generic type for syntactic whitespace (tabs, virtual spaces, spaces). + // Such as, between a fenced code fence and an info string. + whitespace: 'whitespace', + + // Generic type for line endings (line feed, carriage return, carriage return + + // line feed). + lineEnding: 'lineEnding', + + // A line ending, but ending a blank line. + lineEndingBlank: 'lineEndingBlank', + + // Generic type for whitespace (tabs, virtual spaces, spaces) at the start of a + // line. + linePrefix: 'linePrefix', + + // Generic type for whitespace (tabs, virtual spaces, spaces) at the end of a + // line. + lineSuffix: 'lineSuffix', + + // Whole ATX heading: + // + // ```markdown + // # + // ## Alpha + // ### Bravo ### + // ``` + // + // Includes `atxHeadingSequence`, `whitespace`, `atxHeadingText`. + atxHeading: 'atxHeading', + + // Sequence of number signs in an ATX heading (`###`). + atxHeadingSequence: 'atxHeadingSequence', + + // Content in an ATX heading (`alpha`). + // Includes text. + atxHeadingText: 'atxHeadingText', + + // Whole autolink (`<https://example.com>` or `<admin@example.com>`) + // Includes `autolinkMarker` and `autolinkProtocol` or `autolinkEmail`. + autolink: 'autolink', + + // Email autolink w/o markers (`admin@example.com`) + autolinkEmail: 'autolinkEmail', + + // Marker around an `autolinkProtocol` or `autolinkEmail` (`<` or `>`). + autolinkMarker: 'autolinkMarker', + + // Protocol autolink w/o markers (`https://example.com`) + autolinkProtocol: 'autolinkProtocol', + + // A whole character escape (`\-`). + // Includes `escapeMarker` and `characterEscapeValue`. + characterEscape: 'characterEscape', + + // The escaped character (`-`). + characterEscapeValue: 'characterEscapeValue', + + // A whole character reference (`&amp;`, `&#8800;`, or `&#x1D306;`). + // Includes `characterReferenceMarker`, an optional + // `characterReferenceMarkerNumeric`, in which case an optional + // `characterReferenceMarkerHexadecimal`, and a `characterReferenceValue`. + characterReference: 'characterReference', + + // The start or end marker (`&` or `;`). + characterReferenceMarker: 'characterReferenceMarker', + + // Mark reference as numeric (`#`). + characterReferenceMarkerNumeric: 'characterReferenceMarkerNumeric', + + // Mark reference as numeric (`x` or `X`). + characterReferenceMarkerHexadecimal: 'characterReferenceMarkerHexadecimal', + + // Value of character reference w/o markers (`amp`, `8800`, or `1D306`). 
+ characterReferenceValue: 'characterReferenceValue', + + // Whole fenced code: + // + // ````markdown + // ```js + // alert(1) + // ``` + // ```` + codeFenced: 'codeFenced', + + // A fenced code fence, including whitespace, sequence, info, and meta + // (` ```js `). + codeFencedFence: 'codeFencedFence', + + // Sequence of grave accent or tilde characters (` ``` `) in a fence. + codeFencedFenceSequence: 'codeFencedFenceSequence', + + // Info word (`js`) in a fence. + // Includes string. + codeFencedFenceInfo: 'codeFencedFenceInfo', + + // Meta words (`highlight="1"`) in a fence. + // Includes string. + codeFencedFenceMeta: 'codeFencedFenceMeta', + + // A line of code. + codeFlowValue: 'codeFlowValue', + + // Whole indented code: + // + // ```markdown + // alert(1) + // ``` + // + // Includes `lineEnding`, `linePrefix`, and `codeFlowValue`. + codeIndented: 'codeIndented', + + // A text code (``` `alpha` ```). + // Includes `codeTextSequence`, `codeTextData`, `lineEnding`, and can include + // `codeTextPadding`. + codeText: 'codeText', + + codeTextData: 'codeTextData', + + // A space or line ending right after or before a tick. + codeTextPadding: 'codeTextPadding', + + // A text code fence (` `` `). + codeTextSequence: 'codeTextSequence', + + // Whole content: + // + // ```markdown + // [a]: b + // c + // = + // d + // ``` + // + // Includes `paragraph` and `definition`. + content: 'content', + // Whole definition: + // + // ```markdown + // [micromark]: https://github.com/micromark/micromark + // ``` + // + // Includes `definitionLabel`, `definitionMarker`, `whitespace`, + // `definitionDestination`, and optionally `lineEnding` and `definitionTitle`. + definition: 'definition', + + // Destination of a definition (`https://github.com/micromark/micromark` or + // `<https://github.com/micromark/micromark>`). + // Includes `definitionDestinationLiteral` or `definitionDestinationRaw`. + definitionDestination: 'definitionDestination', + + // Enclosed destination of a definition + // (`<https://github.com/micromark/micromark>`). + // Includes `definitionDestinationLiteralMarker` and optionally + // `definitionDestinationString`. + definitionDestinationLiteral: 'definitionDestinationLiteral', + + // Markers of an enclosed definition destination (`<` or `>`). + definitionDestinationLiteralMarker: 'definitionDestinationLiteralMarker', + + // Unenclosed destination of a definition + // (`https://github.com/micromark/micromark`). + // Includes `definitionDestinationString`. + definitionDestinationRaw: 'definitionDestinationRaw', + + // Text in an destination (`https://github.com/micromark/micromark`). + // Includes string. + definitionDestinationString: 'definitionDestinationString', + + // Label of a definition (`[micromark]`). + // Includes `definitionLabelMarker` and `definitionLabelString`. + definitionLabel: 'definitionLabel', + + // Markers of a definition label (`[` or `]`). + definitionLabelMarker: 'definitionLabelMarker', + + // Value of a definition label (`micromark`). + // Includes string. + definitionLabelString: 'definitionLabelString', + + // Marker between a label and a destination (`:`). + definitionMarker: 'definitionMarker', + + // Title of a definition (`"x"`, `'y'`, or `(z)`). + // Includes `definitionTitleMarker` and optionally `definitionTitleString`. + definitionTitle: 'definitionTitle', + + // Marker around a title of a definition (`"`, `'`, `(`, or `)`). + definitionTitleMarker: 'definitionTitleMarker', + + // Data without markers in a title (`z`). + // Includes string. 
+ definitionTitleString: 'definitionTitleString', + + // Emphasis (`*alpha*`). + // Includes `emphasisSequence` and `emphasisText`. + emphasis: 'emphasis', + + // Sequence of emphasis markers (`*` or `_`). + emphasisSequence: 'emphasisSequence', + + // Emphasis text (`alpha`). + // Includes text. + emphasisText: 'emphasisText', + + // The character escape marker (`\`). + escapeMarker: 'escapeMarker', + + // A hard break created with a backslash (`\\n`). + // Includes `escapeMarker` (does not include the line ending) + hardBreakEscape: 'hardBreakEscape', + + // A hard break created with trailing spaces (` \n`). + // Does not include the line ending. + hardBreakTrailing: 'hardBreakTrailing', + + // Flow HTML: + // + // ```markdown + // <div + // ``` + // + // Inlcudes `lineEnding`, `htmlFlowData`. + htmlFlow: 'htmlFlow', + + htmlFlowData: 'htmlFlowData', + + // HTML in text (the tag in `a <i> b`). + // Includes `lineEnding`, `htmlTextData`. + htmlText: 'htmlText', + + htmlTextData: 'htmlTextData', + + // Whole image (`![alpha](bravo)`, `![alpha][bravo]`, `![alpha][]`, or + // `![alpha]`). + // Includes `label` and an optional `resource` or `reference`. + image: 'image', + + // Whole link label (`[*alpha*]`). + // Includes `labelLink` or `labelImage`, `labelText`, and `labelEnd`. + label: 'label', + + // Text in an label (`*alpha*`). + // Includes text. + labelText: 'labelText', + + // Start a link label (`[`). + // Includes a `labelMarker`. + labelLink: 'labelLink', + + // Start an image label (`![`). + // Includes `labelImageMarker` and `labelMarker`. + labelImage: 'labelImage', + + // Marker of a label (`[` or `]`). + labelMarker: 'labelMarker', + + // Marker to start an image (`!`). + labelImageMarker: 'labelImageMarker', + + // End a label (`]`). + // Includes `labelMarker`. + labelEnd: 'labelEnd', + + // Whole link (`[alpha](bravo)`, `[alpha][bravo]`, `[alpha][]`, or `[alpha]`). + // Includes `label` and an optional `resource` or `reference`. + link: 'link', + + // Whole paragraph: + // + // ```markdown + // alpha + // bravo. + // ``` + // + // Includes text. + paragraph: 'paragraph', + + // A reference (`[alpha]` or `[]`). + // Includes `referenceMarker` and an optional `referenceString`. + reference: 'reference', + + // A reference marker (`[` or `]`). + referenceMarker: 'referenceMarker', + + // Reference text (`alpha`). + // Includes string. + referenceString: 'referenceString', + + // A resource (`(https://example.com "alpha")`). + // Includes `resourceMarker`, an optional `resourceDestination` with an optional + // `whitespace` and `resourceTitle`. + resource: 'resource', + + // A resource destination (`https://example.com`). + // Includes `resourceDestinationLiteral` or `resourceDestinationRaw`. + resourceDestination: 'resourceDestination', + + // A literal resource destination (`<https://example.com>`). + // Includes `resourceDestinationLiteralMarker` and optionally + // `resourceDestinationString`. + resourceDestinationLiteral: 'resourceDestinationLiteral', + + // A resource destination marker (`<` or `>`). + resourceDestinationLiteralMarker: 'resourceDestinationLiteralMarker', + + // A raw resource destination (`https://example.com`). + // Includes `resourceDestinationString`. + resourceDestinationRaw: 'resourceDestinationRaw', + + // Resource destination text (`https://example.com`). + // Includes string. + resourceDestinationString: 'resourceDestinationString', + + // A resource marker (`(` or `)`). 
+ resourceMarker: 'resourceMarker', + + // A resource title (`"alpha"`, `'alpha'`, or `(alpha)`). + // Includes `resourceTitleMarker` and optionally `resourceTitleString`. + resourceTitle: 'resourceTitle', + + // A resource title marker (`"`, `'`, `(`, or `)`). + resourceTitleMarker: 'resourceTitleMarker', + + // Resource destination title (`alpha`). + // Includes string. + resourceTitleString: 'resourceTitleString', + + // Whole setext heading: + // + // ```markdown + // alpha + // bravo + // ===== + // ``` + // + // Includes `setextHeadingText`, `lineEnding`, `linePrefix`, and + // `setextHeadingLine`. + setextHeading: 'setextHeading', + + // Content in a setext heading (`alpha\nbravo`). + // Includes text. + setextHeadingText: 'setextHeadingText', + + // Underline in a setext heading, including whitespace suffix (`==`). + // Includes `setextHeadingLineSequence`. + setextHeadingLine: 'setextHeadingLine', + + // Sequence of equals or dash characters in underline in a setext heading (`-`). + setextHeadingLineSequence: 'setextHeadingLineSequence', + + // Strong (`**alpha**`). + // Includes `strongSequence` and `strongText`. + strong: 'strong', + + // Sequence of strong markers (`**` or `__`). + strongSequence: 'strongSequence', + + // Strong text (`alpha`). + // Includes text. + strongText: 'strongText', + + // Whole thematic break: + // + // ```markdown + // * * * + // ``` + // + // Includes `thematicBreakSequence` and `whitespace`. + thematicBreak: 'thematicBreak', + + // A sequence of one or more thematic break markers (`***`). + thematicBreakSequence: 'thematicBreakSequence', + + // Whole block quote: + // + // ```markdown + // > a + // > + // > b + // ``` + // + // Includes `blockQuotePrefix` and flow. + blockQuote: 'blockQuote', + // The `>` or `> ` of a block quote. + blockQuotePrefix: 'blockQuotePrefix', + // The `>` of a block quote prefix. + blockQuoteMarker: 'blockQuoteMarker', + // The optional ` ` of a block quote prefix. + blockQuotePrefixWhitespace: 'blockQuotePrefixWhitespace', + + // Whole unordered list: + // + // ```markdown + // - a + // b + // ``` + // + // Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further + // lines. + listOrdered: 'listOrdered', + + // Whole ordered list: + // + // ```markdown + // 1. a + // b + // ``` + // + // Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further + // lines. + listUnordered: 'listUnordered', + + // The indent of further list item lines. + listItemIndent: 'listItemIndent', + + // A marker, as in, `*`, `+`, `-`, `.`, or `)`. + listItemMarker: 'listItemMarker', + + // The thing that starts a list item, such as `1. `. + // Includes `listItemValue` if ordered, `listItemMarker`, and + // `listItemPrefixWhitespace` (unless followed by a line ending). + listItemPrefix: 'listItemPrefix', + + // The whitespace after a marker. + listItemPrefixWhitespace: 'listItemPrefixWhitespace', + + // The numerical value of an ordered item. + listItemValue: 'listItemValue', + + // Internal types used for subtokenizers, compiled away + chunkContent: 'chunkContent', + chunkFlow: 'chunkFlow', + chunkText: 'chunkText', + chunkString: 'chunkString' +} diff --git a/node_modules/micromark/lib/constant/unicode-punctuation-regex.js b/node_modules/micromark/lib/constant/unicode-punctuation-regex.js new file mode 100644 index 00000000..6d25ee4b --- /dev/null +++ b/node_modules/micromark/lib/constant/unicode-punctuation-regex.js @@ -0,0 +1,11 @@ +'use strict' + +// This module is generated by `script/`. 
+// +// CommonMark handles attention (emphasis, strong) markers based on what comes +// before or after them. +// One such difference is if those characters are Unicode punctuation. +// This script is generated from the Unicode data. +var unicodePunctuation = /[!-\/:-@\[-`\{-~\xA1\xA7\xAB\xB6\xB7\xBB\xBF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u2E52\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]/ + +module.exports = unicodePunctuation diff --git a/node_modules/micromark/lib/constant/unicode-punctuation-regex.mjs b/node_modules/micromark/lib/constant/unicode-punctuation-regex.mjs new file mode 100644 index 00000000..3b6ac3f1 --- /dev/null +++ b/node_modules/micromark/lib/constant/unicode-punctuation-regex.mjs @@ -0,0 +1,7 @@ +// This module is generated by `script/`. +// +// CommonMark handles attention (emphasis, strong) markers based on what comes +// before or after them. +// One such difference is if those characters are Unicode punctuation. +// This script is generated from the Unicode data. 
+export default /[!-/:-@[-`{-~\u00A1\u00A7\u00AB\u00B6\u00B7\u00BB\u00BF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u2E52\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]/ diff --git a/node_modules/micromark/lib/constructs.js b/node_modules/micromark/lib/constructs.js new file mode 100644 index 00000000..d9e5ae1b --- /dev/null +++ b/node_modules/micromark/lib/constructs.js @@ -0,0 +1,98 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var text$1 = require('./initialize/text.js') +var attention = require('./tokenize/attention.js') +var autolink = require('./tokenize/autolink.js') +var blockQuote = require('./tokenize/block-quote.js') +var characterEscape = require('./tokenize/character-escape.js') +var characterReference = require('./tokenize/character-reference.js') +var codeFenced = require('./tokenize/code-fenced.js') +var codeIndented = require('./tokenize/code-indented.js') +var codeText = require('./tokenize/code-text.js') +var definition = require('./tokenize/definition.js') +var hardBreakEscape = require('./tokenize/hard-break-escape.js') +var headingAtx = require('./tokenize/heading-atx.js') +var htmlFlow = require('./tokenize/html-flow.js') +var htmlText = require('./tokenize/html-text.js') +var labelEnd = require('./tokenize/label-end.js') +var labelStartImage = require('./tokenize/label-start-image.js') +var labelStartLink = require('./tokenize/label-start-link.js') +var lineEnding = require('./tokenize/line-ending.js') +var list = require('./tokenize/list.js') +var setextUnderline = require('./tokenize/setext-underline.js') +var thematicBreak = require('./tokenize/thematic-break.js') + +var document = { + 42: list, // Asterisk + 43: list, // Plus sign + 45: list, // Dash + 48: list, // 0 + 49: list, // 1 + 50: list, // 2 + 51: list, // 3 + 52: list, // 4 + 53: list, // 5 + 54: list, // 6 + 55: list, // 7 + 56: list, // 8 + 57: list, // 9 + 62: blockQuote // Greater than +} + +var contentInitial = { + 91: definition // Left square bracket +} + +var flowInitial = { + '-2': codeIndented, // Horizontal tab + '-1': codeIndented, // Virtual space + 32: codeIndented // Space +} + +var flow = { + 35: headingAtx, // Number sign + 42: thematicBreak, // Asterisk + 45: [setextUnderline, thematicBreak], // Dash + 60: htmlFlow, // Less than + 61: setextUnderline, // Equals to + 95: thematicBreak, // Underscore + 96: codeFenced, // Grave accent + 126: 
codeFenced // Tilde +} + +var string = { + 38: characterReference, // Ampersand + 92: characterEscape // Backslash +} + +var text = { + '-5': lineEnding, // Carriage return + '-4': lineEnding, // Line feed + '-3': lineEnding, // Carriage return + line feed + 33: labelStartImage, // Exclamation mark + 38: characterReference, // Ampersand + 42: attention, // Asterisk + 60: [autolink, htmlText], // Less than + 91: labelStartLink, // Left square bracket + 92: [hardBreakEscape, characterEscape], // Backslash + 93: labelEnd, // Right square bracket + 95: attention, // Underscore + 96: codeText // Grave accent +} + +var insideSpan = { + null: [attention, text$1.resolver] +} + +var disable = {null: []} + +exports.contentInitial = contentInitial +exports.disable = disable +exports.document = document +exports.flow = flow +exports.flowInitial = flowInitial +exports.insideSpan = insideSpan +exports.string = string +exports.text = text diff --git a/node_modules/micromark/lib/constructs.mjs b/node_modules/micromark/lib/constructs.mjs new file mode 100644 index 00000000..e52c3df3 --- /dev/null +++ b/node_modules/micromark/lib/constructs.mjs @@ -0,0 +1,85 @@ +import {resolver as resolveText} from './initialize/text.mjs' +import attention from './tokenize/attention.mjs' +import autolink from './tokenize/autolink.mjs' +import blockQuote from './tokenize/block-quote.mjs' +import characterEscape from './tokenize/character-escape.mjs' +import characterReference from './tokenize/character-reference.mjs' +import codeFenced from './tokenize/code-fenced.mjs' +import codeIndented from './tokenize/code-indented.mjs' +import codeText from './tokenize/code-text.mjs' +import definition from './tokenize/definition.mjs' +import hardBreakEscape from './tokenize/hard-break-escape.mjs' +import headingAtx from './tokenize/heading-atx.mjs' +import htmlFlow from './tokenize/html-flow.mjs' +import htmlText from './tokenize/html-text.mjs' +import labelEnd from './tokenize/label-end.mjs' +import labelImage from './tokenize/label-start-image.mjs' +import labelLink from './tokenize/label-start-link.mjs' +import lineEnding from './tokenize/line-ending.mjs' +import list from './tokenize/list.mjs' +import setextUnderline from './tokenize/setext-underline.mjs' +import thematicBreak from './tokenize/thematic-break.mjs' + +export var document = { + 42: list, // Asterisk + 43: list, // Plus sign + 45: list, // Dash + 48: list, // 0 + 49: list, // 1 + 50: list, // 2 + 51: list, // 3 + 52: list, // 4 + 53: list, // 5 + 54: list, // 6 + 55: list, // 7 + 56: list, // 8 + 57: list, // 9 + 62: blockQuote // Greater than +} + +export var contentInitial = { + 91: definition // Left square bracket +} + +export var flowInitial = { + '-2': codeIndented, // Horizontal tab + '-1': codeIndented, // Virtual space + 32: codeIndented // Space +} + +export var flow = { + 35: headingAtx, // Number sign + 42: thematicBreak, // Asterisk + 45: [setextUnderline, thematicBreak], // Dash + 60: htmlFlow, // Less than + 61: setextUnderline, // Equals to + 95: thematicBreak, // Underscore + 96: codeFenced, // Grave accent + 126: codeFenced // Tilde +} + +export var string = { + 38: characterReference, // Ampersand + 92: characterEscape // Backslash +} + +export var text = { + '-5': lineEnding, // Carriage return + '-4': lineEnding, // Line feed + '-3': lineEnding, // Carriage return + line feed + 33: labelImage, // Exclamation mark + 38: characterReference, // Ampersand + 42: attention, // Asterisk + 60: [autolink, htmlText], // Less than + 91: labelLink, // Left 
square bracket + 92: [hardBreakEscape, characterEscape], // Backslash + 93: labelEnd, // Right square bracket + 95: attention, // Underscore + 96: codeText // Grave accent +} + +export var insideSpan = { + null: [attention, resolveText] +} + +export var disable = {null: []} diff --git a/node_modules/micromark/lib/index.d.ts b/node_modules/micromark/lib/index.d.ts new file mode 100644 index 00000000..aa009154 --- /dev/null +++ b/node_modules/micromark/lib/index.d.ts @@ -0,0 +1,11 @@ +import {Buffer, BufferEncoding, Options} from './shared-types' + +declare function buffer(value: string | Buffer, options?: Options): string + +declare function buffer( + value: string | Buffer, + encoding?: BufferEncoding, + options?: Options +): string + +export default buffer diff --git a/node_modules/micromark/lib/index.js b/node_modules/micromark/lib/index.js new file mode 100644 index 00000000..8b289a29 --- /dev/null +++ b/node_modules/micromark/lib/index.js @@ -0,0 +1,21 @@ +'use strict' + +var html = require('./compile/html.js') +var parse = require('./parse.js') +var postprocess = require('./postprocess.js') +var preprocess = require('./preprocess.js') + +function buffer(value, encoding, options) { + if (typeof encoding !== 'string') { + options = encoding + encoding = undefined + } + + return html(options)( + postprocess( + parse(options).document().write(preprocess()(value, encoding, true)) + ) + ) +} + +module.exports = buffer diff --git a/node_modules/micromark/lib/index.mjs b/node_modules/micromark/lib/index.mjs new file mode 100644 index 00000000..2f8db57c --- /dev/null +++ b/node_modules/micromark/lib/index.mjs @@ -0,0 +1,19 @@ +export default buffer + +import compiler from './compile/html.mjs' +import parser from './parse.mjs' +import postprocess from './postprocess.mjs' +import preprocessor from './preprocess.mjs' + +function buffer(value, encoding, options) { + if (typeof encoding !== 'string') { + options = encoding + encoding = undefined + } + + return compiler(options)( + postprocess( + parser(options).document().write(preprocessor()(value, encoding, true)) + ) + ) +} diff --git a/node_modules/micromark/lib/initialize/content.js b/node_modules/micromark/lib/initialize/content.js new file mode 100644 index 00000000..75922234 --- /dev/null +++ b/node_modules/micromark/lib/initialize/content.js @@ -0,0 +1,91 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var factorySpace = require('../tokenize/factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var tokenize = initializeContent + +function initializeContent(effects) { + var contentStart = effects.attempt( + this.parser.constructs.contentInitial, + afterContentStartConstruct, + paragraphInitial + ) + var previous + + return contentStart + + function afterContentStartConstruct(code) { + assert__default['default']( + code === codes.eof || markdownLineEnding(code), + 'expected eol or eof' + ) + + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return factorySpace(effects, contentStart, types.linePrefix) + } + + function paragraphInitial(code) { + assert__default['default']( + code !== codes.eof && !markdownLineEnding(code), + 'expected anything other than a line ending or EOF' + ) + effects.enter(types.paragraph) + return lineStart(code) + } + + function lineStart(code) { + var token = effects.enter(types.chunkText, { + contentType: constants.contentTypeText, + previous: previous + }) + + if (previous) { + previous.next = token + } + + previous = token + + return data(code) + } + + function data(code) { + if (code === codes.eof) { + effects.exit(types.chunkText) + effects.exit(types.paragraph) + effects.consume(code) + return + } + + if (markdownLineEnding(code)) { + effects.consume(code) + effects.exit(types.chunkText) + return lineStart + } + + // Data. + effects.consume(code) + return data + } +} + +exports.tokenize = tokenize diff --git a/node_modules/micromark/lib/initialize/content.mjs b/node_modules/micromark/lib/initialize/content.mjs new file mode 100644 index 00000000..73a9c413 --- /dev/null +++ b/node_modules/micromark/lib/initialize/content.mjs @@ -0,0 +1,79 @@ +export var tokenize = initializeContent + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import spaceFactory from '../tokenize/factory-space.mjs' + +function initializeContent(effects) { + var contentStart = effects.attempt( + this.parser.constructs.contentInitial, + afterContentStartConstruct, + paragraphInitial + ) + var previous + + return contentStart + + function afterContentStartConstruct(code) { + assert( + code === codes.eof || markdownLineEnding(code), + 'expected eol or eof' + ) + + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return spaceFactory(effects, contentStart, types.linePrefix) + } + + function paragraphInitial(code) { + assert( + code !== codes.eof && !markdownLineEnding(code), + 'expected anything other than a line ending or EOF' + ) + effects.enter(types.paragraph) + return lineStart(code) + } + + function lineStart(code) { + var token = effects.enter(types.chunkText, { + contentType: constants.contentTypeText, + previous: previous + }) + + if (previous) { + previous.next = token + } + + previous = token + + return data(code) + } + + function data(code) { + if (code === codes.eof) { + effects.exit(types.chunkText) + effects.exit(types.paragraph) + effects.consume(code) + return + } + + if (markdownLineEnding(code)) { + effects.consume(code) + effects.exit(types.chunkText) + return lineStart + } + + // Data. 
+ effects.consume(code) + return data + } +} diff --git a/node_modules/micromark/lib/initialize/document.js b/node_modules/micromark/lib/initialize/document.js new file mode 100644 index 00000000..fae240f7 --- /dev/null +++ b/node_modules/micromark/lib/initialize/document.js @@ -0,0 +1,245 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var factorySpace = require('../tokenize/factory-space.js') +var partialBlankLine = require('../tokenize/partial-blank-line.js') + +var tokenize = initializeDocument + +var containerConstruct = {tokenize: tokenizeContainer} +var lazyFlowConstruct = {tokenize: tokenizeLazyFlow} + +function initializeDocument(effects) { + var self = this + var stack = [] + var continued = 0 + var inspectConstruct = {tokenize: tokenizeInspect, partial: true} + var inspectResult + var childFlow + var childToken + + return start + + function start(code) { + if (continued < stack.length) { + self.containerState = stack[continued][1] + return effects.attempt( + stack[continued][0].continuation, + documentContinue, + documentContinued + )(code) + } + + return documentContinued(code) + } + + function documentContinue(code) { + continued++ + return start(code) + } + + function documentContinued(code) { + // If we’re in a concrete construct (such as when expecting another line of + // HTML, or we resulted in lazy content), we can immediately start flow. + if (inspectResult && inspectResult.flowContinue) { + return flowStart(code) + } + + self.interrupt = + childFlow && + childFlow.currentConstruct && + childFlow.currentConstruct.interruptible + self.containerState = {} + return effects.attempt( + containerConstruct, + containerContinue, + flowStart + )(code) + } + + function containerContinue(code) { + stack.push([self.currentConstruct, self.containerState]) + self.containerState = undefined + return documentContinued(code) + } + + function flowStart(code) { + if (code === codes.eof) { + exitContainers(0, true) + effects.consume(code) + return + } + + childFlow = childFlow || self.parser.flow(self.now()) + + effects.enter(types.chunkFlow, { + contentType: constants.contentTypeFlow, + previous: childToken, + _tokenizer: childFlow + }) + + return flowContinue(code) + } + + function flowContinue(code) { + if (code === codes.eof) { + continueFlow(effects.exit(types.chunkFlow)) + return flowStart(code) + } + + if (markdownLineEnding(code)) { + effects.consume(code) + continueFlow(effects.exit(types.chunkFlow)) + return effects.check(inspectConstruct, documentAfterPeek) + } + + effects.consume(code) + return flowContinue + } + + function documentAfterPeek(code) { + exitContainers( + inspectResult.continued, + inspectResult && inspectResult.flowEnd + ) + continued = 0 + return start(code) + } + + function continueFlow(token) { + if (childToken) childToken.next = token + childToken = token + childFlow.lazy = inspectResult && inspectResult.lazy + childFlow.defineSkip(token.start) + childFlow.write(self.sliceStream(token)) + } + + function exitContainers(size, end) { + var index = stack.length + + // Close the flow. + if (childFlow && end) { + childFlow.write([codes.eof]) + childToken = childFlow = undefined + } + + // Exit open containers. 
+ while (index-- > size) { + self.containerState = stack[index][1] + stack[index][0].exit.call(self, effects) + } + + stack.length = size + } + + function tokenizeInspect(effects, ok) { + var subcontinued = 0 + + inspectResult = {} + + return inspectStart + + function inspectStart(code) { + if (subcontinued < stack.length) { + self.containerState = stack[subcontinued][1] + return effects.attempt( + stack[subcontinued][0].continuation, + inspectContinue, + inspectLess + )(code) + } + + // If we’re continued but in a concrete flow, we can’t have more + // containers. + if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) { + inspectResult.flowContinue = true + return inspectDone(code) + } + + self.interrupt = + childFlow.currentConstruct && childFlow.currentConstruct.interruptible + self.containerState = {} + return effects.attempt( + containerConstruct, + inspectFlowEnd, + inspectDone + )(code) + } + + function inspectContinue(code) { + subcontinued++ + return self.containerState._closeFlow + ? inspectFlowEnd(code) + : inspectStart(code) + } + + function inspectLess(code) { + if (childFlow.currentConstruct && childFlow.currentConstruct.lazy) { + // Maybe another container? + self.containerState = {} + return effects.attempt( + containerConstruct, + inspectFlowEnd, + // Maybe flow, or a blank line? + effects.attempt( + lazyFlowConstruct, + inspectFlowEnd, + effects.check(partialBlankLine, inspectFlowEnd, inspectLazy) + ) + )(code) + } + + // Otherwise we’re interrupting. + return inspectFlowEnd(code) + } + + function inspectLazy(code) { + // Act as if all containers are continued. + subcontinued = stack.length + inspectResult.lazy = true + inspectResult.flowContinue = true + return inspectDone(code) + } + + // We’re done with flow if we have more containers, or an interruption. + function inspectFlowEnd(code) { + inspectResult.flowEnd = true + return inspectDone(code) + } + + function inspectDone(code) { + inspectResult.continued = subcontinued + self.interrupt = self.containerState = undefined + return ok(code) + } + } +} + +function tokenizeContainer(effects, ok, nok) { + return factorySpace( + effects, + effects.attempt(this.parser.constructs.document, ok, nok), + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + ) +} + +function tokenizeLazyFlow(effects, ok, nok) { + return factorySpace( + effects, + effects.lazy(this.parser.constructs.flow, ok, nok), + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? 
undefined + : constants.tabSize + ) +} + +exports.tokenize = tokenize diff --git a/node_modules/micromark/lib/initialize/document.mjs b/node_modules/micromark/lib/initialize/document.mjs new file mode 100644 index 00000000..9b084f3c --- /dev/null +++ b/node_modules/micromark/lib/initialize/document.mjs @@ -0,0 +1,239 @@ +export var tokenize = initializeDocument + +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import spaceFactory from '../tokenize/factory-space.mjs' +import blank from '../tokenize/partial-blank-line.mjs' + +var containerConstruct = {tokenize: tokenizeContainer} +var lazyFlowConstruct = {tokenize: tokenizeLazyFlow} + +function initializeDocument(effects) { + var self = this + var stack = [] + var continued = 0 + var inspectConstruct = {tokenize: tokenizeInspect, partial: true} + var inspectResult + var childFlow + var childToken + + return start + + function start(code) { + if (continued < stack.length) { + self.containerState = stack[continued][1] + return effects.attempt( + stack[continued][0].continuation, + documentContinue, + documentContinued + )(code) + } + + return documentContinued(code) + } + + function documentContinue(code) { + continued++ + return start(code) + } + + function documentContinued(code) { + // If we’re in a concrete construct (such as when expecting another line of + // HTML, or we resulted in lazy content), we can immediately start flow. + if (inspectResult && inspectResult.flowContinue) { + return flowStart(code) + } + + self.interrupt = + childFlow && + childFlow.currentConstruct && + childFlow.currentConstruct.interruptible + self.containerState = {} + return effects.attempt( + containerConstruct, + containerContinue, + flowStart + )(code) + } + + function containerContinue(code) { + stack.push([self.currentConstruct, self.containerState]) + self.containerState = undefined + return documentContinued(code) + } + + function flowStart(code) { + if (code === codes.eof) { + exitContainers(0, true) + effects.consume(code) + return + } + + childFlow = childFlow || self.parser.flow(self.now()) + + effects.enter(types.chunkFlow, { + contentType: constants.contentTypeFlow, + previous: childToken, + _tokenizer: childFlow + }) + + return flowContinue(code) + } + + function flowContinue(code) { + if (code === codes.eof) { + continueFlow(effects.exit(types.chunkFlow)) + return flowStart(code) + } + + if (markdownLineEnding(code)) { + effects.consume(code) + continueFlow(effects.exit(types.chunkFlow)) + return effects.check(inspectConstruct, documentAfterPeek) + } + + effects.consume(code) + return flowContinue + } + + function documentAfterPeek(code) { + exitContainers( + inspectResult.continued, + inspectResult && inspectResult.flowEnd + ) + continued = 0 + return start(code) + } + + function continueFlow(token) { + if (childToken) childToken.next = token + childToken = token + childFlow.lazy = inspectResult && inspectResult.lazy + childFlow.defineSkip(token.start) + childFlow.write(self.sliceStream(token)) + } + + function exitContainers(size, end) { + var index = stack.length + + // Close the flow. + if (childFlow && end) { + childFlow.write([codes.eof]) + childToken = childFlow = undefined + } + + // Exit open containers. 
+ while (index-- > size) { + self.containerState = stack[index][1] + stack[index][0].exit.call(self, effects) + } + + stack.length = size + } + + function tokenizeInspect(effects, ok) { + var subcontinued = 0 + + inspectResult = {} + + return inspectStart + + function inspectStart(code) { + if (subcontinued < stack.length) { + self.containerState = stack[subcontinued][1] + return effects.attempt( + stack[subcontinued][0].continuation, + inspectContinue, + inspectLess + )(code) + } + + // If we’re continued but in a concrete flow, we can’t have more + // containers. + if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) { + inspectResult.flowContinue = true + return inspectDone(code) + } + + self.interrupt = + childFlow.currentConstruct && childFlow.currentConstruct.interruptible + self.containerState = {} + return effects.attempt( + containerConstruct, + inspectFlowEnd, + inspectDone + )(code) + } + + function inspectContinue(code) { + subcontinued++ + return self.containerState._closeFlow + ? inspectFlowEnd(code) + : inspectStart(code) + } + + function inspectLess(code) { + if (childFlow.currentConstruct && childFlow.currentConstruct.lazy) { + // Maybe another container? + self.containerState = {} + return effects.attempt( + containerConstruct, + inspectFlowEnd, + // Maybe flow, or a blank line? + effects.attempt( + lazyFlowConstruct, + inspectFlowEnd, + effects.check(blank, inspectFlowEnd, inspectLazy) + ) + )(code) + } + + // Otherwise we’re interrupting. + return inspectFlowEnd(code) + } + + function inspectLazy(code) { + // Act as if all containers are continued. + subcontinued = stack.length + inspectResult.lazy = true + inspectResult.flowContinue = true + return inspectDone(code) + } + + // We’re done with flow if we have more containers, or an interruption. + function inspectFlowEnd(code) { + inspectResult.flowEnd = true + return inspectDone(code) + } + + function inspectDone(code) { + inspectResult.continued = subcontinued + self.interrupt = self.containerState = undefined + return ok(code) + } + } +} + +function tokenizeContainer(effects, ok, nok) { + return spaceFactory( + effects, + effects.attempt(this.parser.constructs.document, ok, nok), + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + ) +} + +function tokenizeLazyFlow(effects, ok, nok) { + return spaceFactory( + effects, + effects.lazy(this.parser.constructs.flow, ok, nok), + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + ) +} diff --git a/node_modules/micromark/lib/initialize/flow.js b/node_modules/micromark/lib/initialize/flow.js new file mode 100644 index 00000000..2d71db26 --- /dev/null +++ b/node_modules/micromark/lib/initialize/flow.js @@ -0,0 +1,82 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var types = require('../constant/types.js') +var content = require('../tokenize/content.js') +var factorySpace = require('../tokenize/factory-space.js') +var partialBlankLine = require('../tokenize/partial-blank-line.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var tokenize = initializeFlow + +function initializeFlow(effects) { + var self = this + var initial = effects.attempt( + // Try to parse a blank line. + partialBlankLine, + atBlankEnding, + // Try to parse initial flow (essentially, only code). + effects.attempt( + this.parser.constructs.flowInitial, + afterConstruct, + factorySpace( + effects, + effects.attempt( + this.parser.constructs.flow, + afterConstruct, + effects.attempt(content, afterConstruct) + ), + types.linePrefix + ) + ) + ) + + return initial + + function atBlankEnding(code) { + assert__default['default']( + code === codes.eof || markdownLineEnding(code), + 'expected eol or eof' + ) + + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.lineEndingBlank) + effects.consume(code) + effects.exit(types.lineEndingBlank) + self.currentConstruct = undefined + return initial + } + + function afterConstruct(code) { + assert__default['default']( + code === codes.eof || markdownLineEnding(code), + 'expected eol or eof' + ) + + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + self.currentConstruct = undefined + return initial + } +} + +exports.tokenize = tokenize diff --git a/node_modules/micromark/lib/initialize/flow.mjs b/node_modules/micromark/lib/initialize/flow.mjs new file mode 100644 index 00000000..2f4b905d --- /dev/null +++ b/node_modules/micromark/lib/initialize/flow.mjs @@ -0,0 +1,70 @@ +export var tokenize = initializeFlow + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import types from '../constant/types.mjs' +import content from '../tokenize/content.mjs' +import spaceFactory from '../tokenize/factory-space.mjs' +import blank from '../tokenize/partial-blank-line.mjs' + +function initializeFlow(effects) { + var self = this + var initial = effects.attempt( + // Try to parse a blank line. + blank, + atBlankEnding, + // Try to parse initial flow (essentially, only code). 
+ effects.attempt( + this.parser.constructs.flowInitial, + afterConstruct, + spaceFactory( + effects, + effects.attempt( + this.parser.constructs.flow, + afterConstruct, + effects.attempt(content, afterConstruct) + ), + types.linePrefix + ) + ) + ) + + return initial + + function atBlankEnding(code) { + assert( + code === codes.eof || markdownLineEnding(code), + 'expected eol or eof' + ) + + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.lineEndingBlank) + effects.consume(code) + effects.exit(types.lineEndingBlank) + self.currentConstruct = undefined + return initial + } + + function afterConstruct(code) { + assert( + code === codes.eof || markdownLineEnding(code), + 'expected eol or eof' + ) + + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + self.currentConstruct = undefined + return initial + } +} diff --git a/node_modules/micromark/lib/initialize/text.js b/node_modules/micromark/lib/initialize/text.js new file mode 100644 index 00000000..10274966 --- /dev/null +++ b/node_modules/micromark/lib/initialize/text.js @@ -0,0 +1,210 @@ +'use strict' + +Object.defineProperty(exports, '__esModule', {value: true}) + +var codes = require('../character/codes.js') +var assign = require('../constant/assign.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var shallow = require('../util/shallow.js') + +var text = initializeFactory('text') +var string = initializeFactory('string') +var resolver = {resolveAll: createResolver()} + +function initializeFactory(field) { + return { + tokenize: initializeText, + resolveAll: createResolver( + field === 'text' ? resolveAllLineSuffixes : undefined + ) + } + + function initializeText(effects) { + var self = this + var constructs = this.parser.constructs[field] + var text = effects.attempt(constructs, start, notText) + + return start + + function start(code) { + return atBreak(code) ? text(code) : notText(code) + } + + function notText(code) { + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.data) + effects.consume(code) + return data + } + + function data(code) { + if (atBreak(code)) { + effects.exit(types.data) + return text(code) + } + + // Data. + effects.consume(code) + return data + } + + function atBreak(code) { + var list = constructs[code] + var index = -1 + + if (code === codes.eof) { + return true + } + + if (list) { + while (++index < list.length) { + if ( + !list[index].previous || + list[index].previous.call(self, self.previous) + ) { + return true + } + } + } + } + } +} + +function createResolver(extraResolver) { + return resolveAllText + + function resolveAllText(events, context) { + var index = -1 + var enter + + // A rather boring computation (to merge adjacent `data` events) which + // improves mm performance by 29%. + while (++index <= events.length) { + if (enter === undefined) { + if (events[index] && events[index][1].type === types.data) { + enter = index + index++ + } + } else if (!events[index] || events[index][1].type !== types.data) { + // Don’t do anything if there is one data token. + if (index !== enter + 2) { + events[enter][1].end = events[index - 1][1].end + events.splice(enter + 2, index - enter - 2) + index = enter + 2 + } + + enter = undefined + } + } + + return extraResolver ? 
extraResolver(events, context) : events + } +} + +// A rather ugly set of instructions which again looks at chunks in the input +// stream. +// The reason to do this here is that it is *much* faster to parse in reverse. +// And that we can’t hook into `null` to split the line suffix before an EOF. +// To do: figure out if we can make this into a clean utility, or even in core. +// As it will be useful for GFMs literal autolink extension (and maybe even +// tables?) +function resolveAllLineSuffixes(events, context) { + var eventIndex = -1 + var chunks + var data + var chunk + var index + var bufferIndex + var size + var tabs + var token + + while (++eventIndex <= events.length) { + if ( + (eventIndex === events.length || + events[eventIndex][1].type === types.lineEnding) && + events[eventIndex - 1][1].type === types.data + ) { + data = events[eventIndex - 1][1] + chunks = context.sliceStream(data) + index = chunks.length + bufferIndex = -1 + size = 0 + tabs = undefined + + while (index--) { + chunk = chunks[index] + + if (typeof chunk === 'string') { + bufferIndex = chunk.length + + while (chunk.charCodeAt(bufferIndex - 1) === codes.space) { + size++ + bufferIndex-- + } + + if (bufferIndex) break + bufferIndex = -1 + } + // Number + else if (chunk === codes.horizontalTab) { + tabs = true + size++ + } else if (chunk === codes.virtualSpace); + else { + // Replacement character, exit. + index++ + break + } + } + + if (size) { + token = { + type: + eventIndex === events.length || + tabs || + size < constants.hardBreakPrefixSizeMin + ? types.lineSuffix + : types.hardBreakTrailing, + start: { + line: data.end.line, + column: data.end.column - size, + offset: data.end.offset - size, + _index: data.start._index + index, + _bufferIndex: index + ? bufferIndex + : data.start._bufferIndex + bufferIndex + }, + end: shallow(data.end) + } + + data.end = shallow(token.start) + + if (data.start.offset === data.end.offset) { + assign(data, token) + } else { + events.splice( + eventIndex, + 0, + ['enter', token, context], + ['exit', token, context] + ) + eventIndex += 2 + } + } + + eventIndex++ + } + } + + return events +} + +exports.resolver = resolver +exports.string = string +exports.text = text diff --git a/node_modules/micromark/lib/initialize/text.mjs b/node_modules/micromark/lib/initialize/text.mjs new file mode 100644 index 00000000..8e1bca1f --- /dev/null +++ b/node_modules/micromark/lib/initialize/text.mjs @@ -0,0 +1,203 @@ +export var text = initializeFactory('text') +export var string = initializeFactory('string') +export var resolver = {resolveAll: createResolver()} + +import codes from '../character/codes.mjs' +import assign from '../constant/assign.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import shallow from '../util/shallow.mjs' + +function initializeFactory(field) { + return { + tokenize: initializeText, + resolveAll: createResolver( + field === 'text' ? resolveAllLineSuffixes : undefined + ) + } + + function initializeText(effects) { + var self = this + var constructs = this.parser.constructs[field] + var text = effects.attempt(constructs, start, notText) + + return start + + function start(code) { + return atBreak(code) ? text(code) : notText(code) + } + + function notText(code) { + if (code === codes.eof) { + effects.consume(code) + return + } + + effects.enter(types.data) + effects.consume(code) + return data + } + + function data(code) { + if (atBreak(code)) { + effects.exit(types.data) + return text(code) + } + + // Data. 
+ effects.consume(code) + return data + } + + function atBreak(code) { + var list = constructs[code] + var index = -1 + + if (code === codes.eof) { + return true + } + + if (list) { + while (++index < list.length) { + if ( + !list[index].previous || + list[index].previous.call(self, self.previous) + ) { + return true + } + } + } + } + } +} + +function createResolver(extraResolver) { + return resolveAllText + + function resolveAllText(events, context) { + var index = -1 + var enter + + // A rather boring computation (to merge adjacent `data` events) which + // improves mm performance by 29%. + while (++index <= events.length) { + if (enter === undefined) { + if (events[index] && events[index][1].type === types.data) { + enter = index + index++ + } + } else if (!events[index] || events[index][1].type !== types.data) { + // Don’t do anything if there is one data token. + if (index !== enter + 2) { + events[enter][1].end = events[index - 1][1].end + events.splice(enter + 2, index - enter - 2) + index = enter + 2 + } + + enter = undefined + } + } + + return extraResolver ? extraResolver(events, context) : events + } +} + +// A rather ugly set of instructions which again looks at chunks in the input +// stream. +// The reason to do this here is that it is *much* faster to parse in reverse. +// And that we can’t hook into `null` to split the line suffix before an EOF. +// To do: figure out if we can make this into a clean utility, or even in core. +// As it will be useful for GFMs literal autolink extension (and maybe even +// tables?) +function resolveAllLineSuffixes(events, context) { + var eventIndex = -1 + var chunks + var data + var chunk + var index + var bufferIndex + var size + var tabs + var token + + while (++eventIndex <= events.length) { + if ( + (eventIndex === events.length || + events[eventIndex][1].type === types.lineEnding) && + events[eventIndex - 1][1].type === types.data + ) { + data = events[eventIndex - 1][1] + chunks = context.sliceStream(data) + index = chunks.length + bufferIndex = -1 + size = 0 + tabs = undefined + + while (index--) { + chunk = chunks[index] + + if (typeof chunk === 'string') { + bufferIndex = chunk.length + + while (chunk.charCodeAt(bufferIndex - 1) === codes.space) { + size++ + bufferIndex-- + } + + if (bufferIndex) break + bufferIndex = -1 + } + // Number + else if (chunk === codes.horizontalTab) { + tabs = true + size++ + } else if (chunk === codes.virtualSpace) { + // Empty + } else { + // Replacement character, exit. + index++ + break + } + } + + if (size) { + token = { + type: + eventIndex === events.length || + tabs || + size < constants.hardBreakPrefixSizeMin + ? types.lineSuffix + : types.hardBreakTrailing, + start: { + line: data.end.line, + column: data.end.column - size, + offset: data.end.offset - size, + _index: data.start._index + index, + _bufferIndex: index + ? 
bufferIndex + : data.start._bufferIndex + bufferIndex + }, + end: shallow(data.end) + } + + data.end = shallow(token.start) + + if (data.start.offset === data.end.offset) { + assign(data, token) + } else { + events.splice( + eventIndex, + 0, + ['enter', token, context], + ['exit', token, context] + ) + eventIndex += 2 + } + } + + eventIndex++ + } + } + + return events +} diff --git a/node_modules/micromark/lib/parse.d.ts b/node_modules/micromark/lib/parse.d.ts new file mode 100644 index 00000000..747750c5 --- /dev/null +++ b/node_modules/micromark/lib/parse.d.ts @@ -0,0 +1,5 @@ +import {ParseOptions, Parser} from './shared-types' + +declare function createParser(options?: ParseOptions): Parser + +export default createParser diff --git a/node_modules/micromark/lib/parse.js b/node_modules/micromark/lib/parse.js new file mode 100644 index 00000000..aad11f9e --- /dev/null +++ b/node_modules/micromark/lib/parse.js @@ -0,0 +1,36 @@ +'use strict' + +var content = require('./initialize/content.js') +var document = require('./initialize/document.js') +var flow = require('./initialize/flow.js') +var text = require('./initialize/text.js') +var combineExtensions = require('./util/combine-extensions.js') +var createTokenizer = require('./util/create-tokenizer.js') +var miniflat = require('./util/miniflat.js') +var constructs = require('./constructs.js') + +function parse(options) { + var settings = options || {} + var parser = { + defined: [], + constructs: combineExtensions( + [constructs].concat(miniflat(settings.extensions)) + ), + content: create(content), + document: create(document), + flow: create(flow), + string: create(text.string), + text: create(text.text) + } + + return parser + + function create(initializer) { + return creator + function creator(from) { + return createTokenizer(parser, initializer, from) + } + } +} + +module.exports = parse diff --git a/node_modules/micromark/lib/parse.mjs b/node_modules/micromark/lib/parse.mjs new file mode 100644 index 00000000..a4ca9ac6 --- /dev/null +++ b/node_modules/micromark/lib/parse.mjs @@ -0,0 +1,34 @@ +export default parse + +import * as initializeContent from './initialize/content.mjs' +import * as initializeDocument from './initialize/document.mjs' +import * as initializeFlow from './initialize/flow.mjs' +import * as initializeText from './initialize/text.mjs' +import combineExtensions from './util/combine-extensions.mjs' +import createTokenizer from './util/create-tokenizer.mjs' +import miniflat from './util/miniflat.mjs' +import * as constructs from './constructs.mjs' + +function parse(options) { + var settings = options || {} + var parser = { + defined: [], + constructs: combineExtensions( + [constructs].concat(miniflat(settings.extensions)) + ), + content: create(initializeContent), + document: create(initializeDocument), + flow: create(initializeFlow), + string: create(initializeText.string), + text: create(initializeText.text) + } + + return parser + + function create(initializer) { + return creator + function creator(from) { + return createTokenizer(parser, initializer, from) + } + } +} diff --git a/node_modules/micromark/lib/postprocess.d.ts b/node_modules/micromark/lib/postprocess.d.ts new file mode 100644 index 00000000..b2af6ac1 --- /dev/null +++ b/node_modules/micromark/lib/postprocess.d.ts @@ -0,0 +1,5 @@ +import {Event} from './shared-types' + +declare function postprocess(events: Event[]): Event[] + +export default postprocess diff --git a/node_modules/micromark/lib/postprocess.js b/node_modules/micromark/lib/postprocess.js new 
file mode 100644 index 00000000..842f8ce8 --- /dev/null +++ b/node_modules/micromark/lib/postprocess.js @@ -0,0 +1,13 @@ +'use strict' + +var subtokenize = require('./util/subtokenize.js') + +function postprocess(events) { + while (!subtokenize(events)) { + // Empty + } + + return events +} + +module.exports = postprocess diff --git a/node_modules/micromark/lib/postprocess.mjs b/node_modules/micromark/lib/postprocess.mjs new file mode 100644 index 00000000..f32e378d --- /dev/null +++ b/node_modules/micromark/lib/postprocess.mjs @@ -0,0 +1,11 @@ +export default postprocess + +import subtokenize from './util/subtokenize.mjs' + +function postprocess(events) { + while (!subtokenize(events)) { + // Empty + } + + return events +} diff --git a/node_modules/micromark/lib/preprocess.d.ts b/node_modules/micromark/lib/preprocess.d.ts new file mode 100644 index 00000000..95692ae5 --- /dev/null +++ b/node_modules/micromark/lib/preprocess.d.ts @@ -0,0 +1,11 @@ +import {BufferEncoding} from './shared-types' + +type PreprocessReturn = ( + value: string, + encoding: BufferEncoding, + end?: boolean +) => string[] + +declare function preprocess(): PreprocessReturn + +export default preprocess diff --git a/node_modules/micromark/lib/preprocess.js b/node_modules/micromark/lib/preprocess.js new file mode 100644 index 00000000..caf9c070 --- /dev/null +++ b/node_modules/micromark/lib/preprocess.js @@ -0,0 +1,96 @@ +'use strict' + +var codes = require('./character/codes.js') +var constants = require('./constant/constants.js') + +var search = /[\0\t\n\r]/g + +function preprocess() { + var start = true + var column = 1 + var buffer = '' + var atCarriageReturn + + return preprocessor + + function preprocessor(value, encoding, end) { + var chunks = [] + var match + var next + var startPosition + var endPosition + var code + + value = buffer + value.toString(encoding) + startPosition = 0 + buffer = '' + + if (start) { + if (value.charCodeAt(0) === codes.byteOrderMarker) { + startPosition++ + } + + start = undefined + } + + while (startPosition < value.length) { + search.lastIndex = startPosition + match = search.exec(value) + endPosition = match ? match.index : value.length + code = value.charCodeAt(endPosition) + + if (!match) { + buffer = value.slice(startPosition) + break + } + + if ( + code === codes.lf && + startPosition === endPosition && + atCarriageReturn + ) { + chunks.push(codes.carriageReturnLineFeed) + atCarriageReturn = undefined + } else { + if (atCarriageReturn) { + chunks.push(codes.carriageReturn) + atCarriageReturn = undefined + } + + if (startPosition < endPosition) { + chunks.push(value.slice(startPosition, endPosition)) + column += endPosition - startPosition + } + + if (code === codes.nul) { + chunks.push(codes.replacementCharacter) + column++ + } else if (code === codes.ht) { + next = Math.ceil(column / constants.tabSize) * constants.tabSize + chunks.push(codes.horizontalTab) + while (column++ < next) chunks.push(codes.virtualSpace) + } else if (code === codes.lf) { + chunks.push(codes.lineFeed) + column = 1 + } + // Must be carriage return. 
+ else { + atCarriageReturn = true + column = 1 + } + } + + startPosition = endPosition + 1 + } + + if (end) { + if (atCarriageReturn) chunks.push(codes.carriageReturn) + if (buffer) chunks.push(buffer) + chunks.push(codes.eof) + } + + return chunks + } +} + +module.exports = preprocess diff --git a/node_modules/micromark/lib/preprocess.mjs b/node_modules/micromark/lib/preprocess.mjs new file mode 100644 index 00000000..4413159d --- /dev/null +++ b/node_modules/micromark/lib/preprocess.mjs @@ -0,0 +1,94 @@ +export default preprocess + +import codes from './character/codes.mjs' +import constants from './constant/constants.mjs' + +var search = /[\0\t\n\r]/g + +function preprocess() { + var start = true + var column = 1 + var buffer = '' + var atCarriageReturn + + return preprocessor + + function preprocessor(value, encoding, end) { + var chunks = [] + var match + var next + var startPosition + var endPosition + var code + + value = buffer + value.toString(encoding) + startPosition = 0 + buffer = '' + + if (start) { + if (value.charCodeAt(0) === codes.byteOrderMarker) { + startPosition++ + } + + start = undefined + } + + while (startPosition < value.length) { + search.lastIndex = startPosition + match = search.exec(value) + endPosition = match ? match.index : value.length + code = value.charCodeAt(endPosition) + + if (!match) { + buffer = value.slice(startPosition) + break + } + + if ( + code === codes.lf && + startPosition === endPosition && + atCarriageReturn + ) { + chunks.push(codes.carriageReturnLineFeed) + atCarriageReturn = undefined + } else { + if (atCarriageReturn) { + chunks.push(codes.carriageReturn) + atCarriageReturn = undefined + } + + if (startPosition < endPosition) { + chunks.push(value.slice(startPosition, endPosition)) + column += endPosition - startPosition + } + + if (code === codes.nul) { + chunks.push(codes.replacementCharacter) + column++ + } else if (code === codes.ht) { + next = Math.ceil(column / constants.tabSize) * constants.tabSize + chunks.push(codes.horizontalTab) + while (column++ < next) chunks.push(codes.virtualSpace) + } else if (code === codes.lf) { + chunks.push(codes.lineFeed) + column = 1 + } + // Must be carriage return. + else { + atCarriageReturn = true + column = 1 + } + } + + startPosition = endPosition + 1 + } + + if (end) { + if (atCarriageReturn) chunks.push(codes.carriageReturn) + if (buffer) chunks.push(buffer) + chunks.push(codes.eof) + } + + return chunks + } +} diff --git a/node_modules/micromark/lib/shared-types.d.ts b/node_modules/micromark/lib/shared-types.d.ts new file mode 100644 index 00000000..81595fd2 --- /dev/null +++ b/node_modules/micromark/lib/shared-types.d.ts @@ -0,0 +1,291 @@ +// Minimum TypeScript Version: 3.0 + +import {Code} from './character/codes' +import {Type} from './constant/types' + +/** + * A location in a string or buffer + */ +export interface Point { + line: number + column: number + offset: number + _index?: number + _bufferIndex?: number +} + +/** + * + */ +export interface Token { + type: Type + start: Point + end: Point + + previous?: Token + next?: Token + + /** + * Declares a token as having content of a certain type. + * Because markdown requires to first parse containers, flow, content completely, + * and then later go on to phrasing and such, it needs to be declared somewhere on the tokens. + */ + contentType?: 'flow' | 'content' | 'string' | 'text' + + /** + * Used when dealing with linked tokens. 
A child tokenizer is needed to tokenize them, which is stored on those tokens + */ + _tokenizer?: Tokenizer + + /** + * Close and open are also used in attention: + * depending on the characters before and after sequences (**), + * the sequence can open, close, both, or none + */ + _open?: boolean + + /** + * Close and open are also used in attention: + * depending on the characters before and after sequences (**), + * the sequence can open, close, both, or none + */ + _close?: boolean +} + +/** + * + */ +export type Event = [string, Token, Tokenizer] + +/** + * These are transitions to update the CommonMark State Machine (CSMS) + */ +export interface Effects { + /** + * Enter and exit define where tokens start and end + */ + enter: (type: Type) => Token + + /** + * Enter and exit define where tokens start and end + */ + exit: (type: Type) => Token + + /** + * Consume deals with a character, and moves to the next + */ + consume: (code: number) => void + + /** + * Attempt deals with several values, and tries to parse according to those values. + * If a value resulted in `ok`, it worked, the tokens that were made are used, + * and `returnState` is switched to. + * If the result is `nok`, the attempt failed, + * so we revert to the original state, and `bogusState` is used. + */ + attempt: ( + constructInfo: + | Construct + | Construct[] + | Record<CodeAsKey, Construct | Construct[]>, + returnState: State, + bogusState?: State + ) => (code: Code) => void + + /** + * Interrupt is used for stuff right after a line of content. + */ + interrupt: ( + constructInfo: + | Construct + | Construct[] + | Record<CodeAsKey, Construct | Construct[]>, + ok: Okay, + nok?: NotOkay + ) => (code: Code) => void + + check: ( + constructInfo: + | Construct + | Construct[] + | Record<CodeAsKey, Construct | Construct[]>, + ok: Okay, + nok?: NotOkay + ) => (code: Code) => void + + /** + * Lazy is used for lines that were not properly preceded by the container. + */ + lazy: ( + constructInfo: + | Construct + | Construct[] + | Record<CodeAsKey, Construct | Construct[]>, + ok: Okay, + nok?: NotOkay + ) => void +} + +/** + * A state function should return another function: the next state-as-a-function to go to. + * + * But there is one case where they return void: for the eof character code (at the end of a value). + * The reason being: well, there isn’t any state that makes sense, so void works well. Practically + * that has also helped: if for some reason it was a mistake, then an exception is thrown because + * there is no next function, meaning it surfaces early. 
+ */ +export type State = (code: number) => State | void + +/** + * + */ +export type Okay = State + +/** + * + */ +export type NotOkay = State + +/** + * + */ +export interface Tokenizer { + previous: Code + events: Event[] + parser: Parser + sliceStream: (token: Token) => Chunk[] + sliceSerialize: (token: Token) => string + now: () => Point + defineSkip: (value: Point) => void + write: (slice: Chunk[]) => Event[] +} + +export type Resolve = (events: Event[], context: Tokenizer) => Event[] + +export type Tokenize = (context: Tokenizer, effects: Effects) => State + +export interface Construct { + name?: string + tokenize: Tokenize + partial?: boolean + resolve?: Resolve + resolveTo?: Resolve + resolveAll?: Resolve + concrete?: boolean + interruptible?: boolean + lazy?: boolean +} + +/** + * + */ +export interface Parser { + constructs: Record<CodeAsKey, Construct | Construct[]> + content: (from: Point) => Tokenizer + document: (from: Point) => Tokenizer + flow: (from: Point) => Tokenizer + string: (from: Point) => Tokenizer + text: (from: Point) => Tokenizer + defined: string[] +} + +/** + * + */ +export interface TokenizerThis { + events: Event[] + interrupt?: boolean + lazy?: boolean + containerState?: Record<string, unknown> +} + +/** + * `Compile` is the return value of `lib/compile/html.js` + */ +export type Compile = (slice: Event[]) => string + +/** + * https://github.com/micromark/micromark#syntaxextension + */ +export interface SyntaxExtension { + document?: Record<CodeAsKey, Construct | Construct[]> + contentInitial?: Record<CodeAsKey, Construct | Construct[]> + flowInitial?: Record<CodeAsKey, Construct | Construct[]> + flow?: Record<CodeAsKey, Construct | Construct[]> + string?: Record<CodeAsKey, Construct | Construct[]> + text?: Record<CodeAsKey, Construct | Construct[]> +} + +/** + * https://github.com/micromark/micromark#htmlextension + */ +export type HtmlExtension = + | {enter: Record<Type, () => void>} + | {exit: Record<Type, () => void>} + +export type Options = ParseOptions & CompileOptions + +export interface ParseOptions { + // Array of syntax extensions + // + extensions?: SyntaxExtension[] +} + +export interface CompileOptions { + // Value to use for line endings not in `doc` (`string`, default: first line + // ending or `'\n'`). + // + // Generally, micromark copies line endings (`'\r'`, `'\n'`, `'\r\n'`) in the + // markdown document over to the compiled HTML. + // In some cases, such as `> a`, CommonMark requires that extra line endings are + // added: `<blockquote>\n<p>a</p>\n</blockquote>`. + // + defaultLineEnding?: '\r' | '\n' | '\r\n' + // Whether to allow embedded HTML (`boolean`, default: `false`). + // + allowDangerousHtml?: boolean + // Whether to allow potentially dangerous protocols in links and images (`boolean`, + // default: `false`). + // URLs relative to the current protocol are always allowed (such as, `image.jpg`). + // For links, the allowed protocols are `http`, `https`, `irc`, `ircs`, `mailto`, + // and `xmpp`. + // For images, the allowed protocols are `http` and `https`. + // + allowDangerousProtocol?: boolean + // Array of HTML extensions + // + htmlExtensions?: HtmlExtension[] +} + +export type Chunk = NonNullable<Code> | string + +// TypeScript will complain that `null` can't be the key of an object. So when a `Code` value is a key of an object, use CodeAsKey instead. 
+export type CodeAsKey = NonNullable<Code> | 'null' + +/** + * Encodings supported by the buffer class + * + * @remarks + * This is a copy of the typing from Node, copied to prevent Node globals from being needed. + * Copied from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/a2bc1d868d81733a8969236655fa600bd3651a7b/types/node/globals.d.ts#L174 + */ +export type BufferEncoding = + | 'ascii' + | 'utf8' + | 'utf-8' + | 'utf16le' + | 'ucs2' + | 'ucs-2' + | 'base64' + | 'latin1' + | 'binary' + | 'hex' + +/** + * This is an interface for Node's Buffer. + */ +export interface Buffer { + toString: (encoding?: BufferEncoding) => string +} + +export type CodeCheck = (code: Code) => boolean diff --git a/node_modules/micromark/lib/stream.d.ts b/node_modules/micromark/lib/stream.d.ts new file mode 100644 index 00000000..b814c57b --- /dev/null +++ b/node_modules/micromark/lib/stream.d.ts @@ -0,0 +1,6 @@ +import {EventEmitter} from 'events' +import {Options} from './shared-types' + +declare function stream(options?: Options): EventEmitter + +export default stream diff --git a/node_modules/micromark/lib/stream.js b/node_modules/micromark/lib/stream.js new file mode 100644 index 00000000..07fb675b --- /dev/null +++ b/node_modules/micromark/lib/stream.js @@ -0,0 +1,119 @@ +'use strict' + +var events = require('events') +var html = require('./compile/html.js') +var parse = require('./parse.js') +var postprocess = require('./postprocess.js') +var preprocess = require('./preprocess.js') + +function stream(options) { + var preprocess$1 = preprocess() + var tokenize = parse(options).document().write + var compile = html(options) + var emitter = new events.EventEmitter() + var ended + + emitter.writable = emitter.readable = true + emitter.write = write + emitter.end = end + emitter.pipe = pipe + + return emitter + + // Write a chunk into memory. + function write(chunk, encoding, callback) { + if (typeof encoding === 'function') { + callback = encoding + encoding = undefined + } + + if (ended) { + throw new Error('Did not expect `write` after `end`') + } + + tokenize(preprocess$1(chunk || '', encoding)) + + if (callback) { + callback() + } + + // Signal successful write. + return true + } + + // End the writing. + // Passes all arguments to a final `write`. + function end(chunk, encoding, callback) { + write(chunk, encoding, callback) + + emitter.emit( + 'data', + compile(postprocess(tokenize(preprocess$1('', encoding, true)))) + ) + + emitter.emit('end') + ended = true + return true + } + + // Pipe the processor into a writable stream. + // Basically `Stream#pipe`, but inlined and simplified to keep the bundled + // size down. + // See: <https://github.com/nodejs/node/blob/43a5170/lib/internal/streams/legacy.js#L13>. + function pipe(dest, options) { + emitter.on('data', ondata) + emitter.on('error', onerror) + emitter.on('end', cleanup) + emitter.on('close', cleanup) + + // If the `end` option is not supplied, `dest.end()` will be called when the + // `end` or `close` events are received. + if (!dest._isStdio && (!options || options.end !== false)) { + emitter.on('end', onend) + } + + dest.on('error', onerror) + dest.on('close', cleanup) + + dest.emit('pipe', emitter) + + return dest + + // End destination. + function onend() { + if (dest.end) { + dest.end() + } + } + + // Handle data. + function ondata(chunk) { + if (dest.writable) { + dest.write(chunk) + } + } + + // Clean listeners. 
+ function cleanup() { + emitter.removeListener('data', ondata) + emitter.removeListener('end', onend) + emitter.removeListener('error', onerror) + emitter.removeListener('end', cleanup) + emitter.removeListener('close', cleanup) + + dest.removeListener('error', onerror) + dest.removeListener('close', cleanup) + } + + // Close dangling pipes and handle unheard errors. + function onerror(error) { + cleanup() + + if (!emitter.listenerCount('error')) { + throw error // Unhandled stream error in pipe. + } + } + } +} + +module.exports = stream diff --git a/node_modules/micromark/lib/stream.mjs b/node_modules/micromark/lib/stream.mjs new file mode 100644 index 00000000..d00c74a8 --- /dev/null +++ b/node_modules/micromark/lib/stream.mjs @@ -0,0 +1,117 @@ +export default stream + +import {EventEmitter} from 'events' +import compiler from './compile/html.mjs' +import parser from './parse.mjs' +import postprocess from './postprocess.mjs' +import preprocessor from './preprocess.mjs' + +function stream(options) { + var preprocess = preprocessor() + var tokenize = parser(options).document().write + var compile = compiler(options) + var emitter = new EventEmitter() + var ended + + emitter.writable = emitter.readable = true + emitter.write = write + emitter.end = end + emitter.pipe = pipe + + return emitter + + // Write a chunk into memory. + function write(chunk, encoding, callback) { + if (typeof encoding === 'function') { + callback = encoding + encoding = undefined + } + + if (ended) { + throw new Error('Did not expect `write` after `end`') + } + + tokenize(preprocess(chunk || '', encoding)) + + if (callback) { + callback() + } + + // Signal successful write. + return true + } + + // End the writing. + // Passes all arguments to a final `write`. + function end(chunk, encoding, callback) { + write(chunk, encoding, callback) + + emitter.emit( + 'data', + compile(postprocess(tokenize(preprocess('', encoding, true)))) + ) + + emitter.emit('end') + ended = true + return true + } + + // Pipe the processor into a writable stream. + // Basically `Stream#pipe`, but inlined and simplified to keep the bundled + // size down. + // See: <https://github.com/nodejs/node/blob/43a5170/lib/internal/streams/legacy.js#L13>. + function pipe(dest, options) { + emitter.on('data', ondata) + emitter.on('error', onerror) + emitter.on('end', cleanup) + emitter.on('close', cleanup) + + // If the `end` option is not supplied, `dest.end()` will be called when the + // `end` or `close` events are received. + if (!dest._isStdio && (!options || options.end !== false)) { + emitter.on('end', onend) + } + + dest.on('error', onerror) + dest.on('close', cleanup) + + dest.emit('pipe', emitter) + + return dest + + // End destination. + function onend() { + if (dest.end) { + dest.end() + } + } + + // Handle data. + function ondata(chunk) { + if (dest.writable) { + dest.write(chunk) + } + } + + // Clean listeners. + function cleanup() { + emitter.removeListener('data', ondata) + emitter.removeListener('end', onend) + emitter.removeListener('error', onerror) + emitter.removeListener('end', cleanup) + emitter.removeListener('close', cleanup) + + dest.removeListener('error', onerror) + dest.removeListener('close', cleanup) + } + + // Close dangling pipes and handle unheard errors. + function onerror(error) { + cleanup() + + if (!emitter.listenerCount('error')) { + throw error // Unhandled stream error in pipe. 
+ } + } + } +} diff --git a/node_modules/micromark/lib/tokenize/attention.js b/node_modules/micromark/lib/tokenize/attention.js new file mode 100644 index 00000000..b38970d9 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/attention.js @@ -0,0 +1,216 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var chunkedPush = require('../util/chunked-push.js') +var chunkedSplice = require('../util/chunked-splice.js') +var classifyCharacter = require('../util/classify-character.js') +var movePoint = require('../util/move-point.js') +var resolveAll = require('../util/resolve-all.js') +var shallow = require('../util/shallow.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var attention = { + name: 'attention', + tokenize: tokenizeAttention, + resolveAll: resolveAllAttention +} + +// Take all events and resolve attention to emphasis or strong. +function resolveAllAttention(events, context) { + var index = -1 + var open + var group + var text + var openingSequence + var closingSequence + var use + var nextEvents + var offset + + // Walk through all events. + // + // Note: performance of this is fine on an mb of normal markdown, but it’s + // a bottleneck for malicious stuff. + while (++index < events.length) { + // Find a token that can close. + if ( + events[index][0] === 'enter' && + events[index][1].type === 'attentionSequence' && + events[index][1]._close + ) { + open = index + + // Now walk back to find an opener. + while (open--) { + // Find a token that can open the closer. + if ( + events[open][0] === 'exit' && + events[open][1].type === 'attentionSequence' && + events[open][1]._open && + // If the markers are the same: + context.sliceSerialize(events[open][1]).charCodeAt(0) === + context.sliceSerialize(events[index][1]).charCodeAt(0) + ) { + // If the opening can close or the closing can open, + // and the close size *is not* a multiple of three, + // but the sum of the opening and closing size *is* multiple of three, + // then don’t match. + if ( + (events[open][1]._close || events[index][1]._open) && + (events[index][1].end.offset - events[index][1].start.offset) % 3 && + !( + (events[open][1].end.offset - + events[open][1].start.offset + + events[index][1].end.offset - + events[index][1].start.offset) % + 3 + ) + ) { + continue + } + + // Number of markers to use from the sequence. + use = + events[open][1].end.offset - events[open][1].start.offset > 1 && + events[index][1].end.offset - events[index][1].start.offset > 1 + ? 2 + : 1 + + openingSequence = { + type: use > 1 ? types.strongSequence : types.emphasisSequence, + start: movePoint(shallow(events[open][1].end), -use), + end: shallow(events[open][1].end) + } + closingSequence = { + type: use > 1 ? types.strongSequence : types.emphasisSequence, + start: shallow(events[index][1].start), + end: movePoint(shallow(events[index][1].start), use) + } + text = { + type: use > 1 ? types.strongText : types.emphasisText, + start: shallow(events[open][1].end), + end: shallow(events[index][1].start) + } + group = { + type: use > 1 ? 
types.strong : types.emphasis, + start: shallow(openingSequence.start), + end: shallow(closingSequence.end) + } + + events[open][1].end = shallow(openingSequence.start) + events[index][1].start = shallow(closingSequence.end) + + nextEvents = [] + + // If there are more markers in the opening, add them before. + if (events[open][1].end.offset - events[open][1].start.offset) { + nextEvents = chunkedPush(nextEvents, [ + ['enter', events[open][1], context], + ['exit', events[open][1], context] + ]) + } + + // Opening. + nextEvents = chunkedPush(nextEvents, [ + ['enter', group, context], + ['enter', openingSequence, context], + ['exit', openingSequence, context], + ['enter', text, context] + ]) + + // Between. + nextEvents = chunkedPush( + nextEvents, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(open + 1, index), + context + ) + ) + + // Closing. + nextEvents = chunkedPush(nextEvents, [ + ['exit', text, context], + ['enter', closingSequence, context], + ['exit', closingSequence, context], + ['exit', group, context] + ]) + + // If there are more markers in the closing, add them after. + if (events[index][1].end.offset - events[index][1].start.offset) { + offset = 2 + nextEvents = chunkedPush(nextEvents, [ + ['enter', events[index][1], context], + ['exit', events[index][1], context] + ]) + } else { + offset = 0 + } + + chunkedSplice(events, open - 1, index - open + 3, nextEvents) + + index = open + nextEvents.length - offset - 2 + break + } + } + } + } + + // Remove remaining sequences. + index = -1 + + while (++index < events.length) { + if (events[index][1].type === 'attentionSequence') { + events[index][1].type = 'data' + } + } + + return events +} + +function tokenizeAttention(effects, ok) { + var before = classifyCharacter(this.previous) + var marker + + return start + + function start(code) { + assert__default['default']( + code === codes.asterisk || code === codes.underscore, + 'expected asterisk or underscore' + ) + effects.enter('attentionSequence') + marker = code + return sequence(code) + } + + function sequence(code) { + var token + var after + var open + var close + + if (code === marker) { + effects.consume(code) + return sequence + } + + token = effects.exit('attentionSequence') + after = classifyCharacter(code) + open = !after || (after === constants.characterGroupPunctuation && before) + close = !before || (before === constants.characterGroupPunctuation && after) + token._open = marker === codes.asterisk ? open : open && (before || !close) + token._close = marker === codes.asterisk ? 
close : close && (after || !open) + return ok(code) + } +} + +module.exports = attention diff --git a/node_modules/micromark/lib/tokenize/attention.mjs b/node_modules/micromark/lib/tokenize/attention.mjs new file mode 100644 index 00000000..a3c81460 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/attention.mjs @@ -0,0 +1,207 @@ +var attention = { + name: 'attention', + tokenize: tokenizeAttention, + resolveAll: resolveAllAttention +} +export default attention + +import assert from 'assert' +import codes from '../character/codes.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import chunkedPush from '../util/chunked-push.mjs' +import chunkedSplice from '../util/chunked-splice.mjs' +import classifyCharacter from '../util/classify-character.mjs' +import movePoint from '../util/move-point.mjs' +import resolveAll from '../util/resolve-all.mjs' +import shallow from '../util/shallow.mjs' + +// Take all events and resolve attention to emphasis or strong. +function resolveAllAttention(events, context) { + var index = -1 + var open + var group + var text + var openingSequence + var closingSequence + var use + var nextEvents + var offset + + // Walk through all events. + // + // Note: performance of this is fine on an mb of normal markdown, but it’s + // a bottleneck for malicious stuff. + while (++index < events.length) { + // Find a token that can close. + if ( + events[index][0] === 'enter' && + events[index][1].type === 'attentionSequence' && + events[index][1]._close + ) { + open = index + + // Now walk back to find an opener. + while (open--) { + // Find a token that can open the closer. + if ( + events[open][0] === 'exit' && + events[open][1].type === 'attentionSequence' && + events[open][1]._open && + // If the markers are the same: + context.sliceSerialize(events[open][1]).charCodeAt(0) === + context.sliceSerialize(events[index][1]).charCodeAt(0) + ) { + // If the opening can close or the closing can open, + // and the close size *is not* a multiple of three, + // but the sum of the opening and closing size *is* multiple of three, + // then don’t match. + if ( + (events[open][1]._close || events[index][1]._open) && + (events[index][1].end.offset - events[index][1].start.offset) % 3 && + !( + (events[open][1].end.offset - + events[open][1].start.offset + + events[index][1].end.offset - + events[index][1].start.offset) % + 3 + ) + ) { + continue + } + + // Number of markers to use from the sequence. + use = + events[open][1].end.offset - events[open][1].start.offset > 1 && + events[index][1].end.offset - events[index][1].start.offset > 1 + ? 2 + : 1 + + openingSequence = { + type: use > 1 ? types.strongSequence : types.emphasisSequence, + start: movePoint(shallow(events[open][1].end), -use), + end: shallow(events[open][1].end) + } + closingSequence = { + type: use > 1 ? types.strongSequence : types.emphasisSequence, + start: shallow(events[index][1].start), + end: movePoint(shallow(events[index][1].start), use) + } + text = { + type: use > 1 ? types.strongText : types.emphasisText, + start: shallow(events[open][1].end), + end: shallow(events[index][1].start) + } + group = { + type: use > 1 ? types.strong : types.emphasis, + start: shallow(openingSequence.start), + end: shallow(closingSequence.end) + } + + events[open][1].end = shallow(openingSequence.start) + events[index][1].start = shallow(closingSequence.end) + + nextEvents = [] + + // If there are more markers in the opening, add them before. 
+ if (events[open][1].end.offset - events[open][1].start.offset) { + nextEvents = chunkedPush(nextEvents, [ + ['enter', events[open][1], context], + ['exit', events[open][1], context] + ]) + } + + // Opening. + nextEvents = chunkedPush(nextEvents, [ + ['enter', group, context], + ['enter', openingSequence, context], + ['exit', openingSequence, context], + ['enter', text, context] + ]) + + // Between. + nextEvents = chunkedPush( + nextEvents, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(open + 1, index), + context + ) + ) + + // Closing. + nextEvents = chunkedPush(nextEvents, [ + ['exit', text, context], + ['enter', closingSequence, context], + ['exit', closingSequence, context], + ['exit', group, context] + ]) + + // If there are more markers in the closing, add them after. + if (events[index][1].end.offset - events[index][1].start.offset) { + offset = 2 + nextEvents = chunkedPush(nextEvents, [ + ['enter', events[index][1], context], + ['exit', events[index][1], context] + ]) + } else { + offset = 0 + } + + chunkedSplice(events, open - 1, index - open + 3, nextEvents) + + index = open + nextEvents.length - offset - 2 + break + } + } + } + } + + // Remove remaining sequences. + index = -1 + + while (++index < events.length) { + if (events[index][1].type === 'attentionSequence') { + events[index][1].type = 'data' + } + } + + return events +} + +function tokenizeAttention(effects, ok) { + var before = classifyCharacter(this.previous) + var marker + + return start + + function start(code) { + assert( + code === codes.asterisk || code === codes.underscore, + 'expected asterisk or underscore' + ) + effects.enter('attentionSequence') + marker = code + return sequence(code) + } + + function sequence(code) { + var token + var after + var open + var close + + if (code === marker) { + effects.consume(code) + return sequence + } + + token = effects.exit('attentionSequence') + after = classifyCharacter(code) + open = !after || (after === constants.characterGroupPunctuation && before) + close = !before || (before === constants.characterGroupPunctuation && after) + token._open = marker === codes.asterisk ? open : open && (before || !close) + token._close = marker === codes.asterisk ? close : close && (after || !open) + return ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/autolink.js b/node_modules/micromark/lib/tokenize/autolink.js new file mode 100644 index 00000000..037280bb --- /dev/null +++ b/node_modules/micromark/lib/tokenize/autolink.js @@ -0,0 +1,147 @@ +'use strict' + +var assert = require('assert') +var asciiAlpha = require('../character/ascii-alpha.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var asciiAtext = require('../character/ascii-atext.js') +var asciiControl = require('../character/ascii-control.js') +var codes = require('../character/codes.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var autolink = { + name: 'autolink', + tokenize: tokenizeAutolink +} + +function tokenizeAutolink(effects, ok, nok) { + var size = 1 + + return start + + function start(code) { + assert__default['default'](code === codes.lessThan, 'expected `<`') + effects.enter(types.autolink) + effects.enter(types.autolinkMarker) + effects.consume(code) + effects.exit(types.autolinkMarker) + effects.enter(types.autolinkProtocol) + return open + } + + function open(code) { + if (asciiAlpha(code)) { + effects.consume(code) + return schemeOrEmailAtext + } + + return asciiAtext(code) ? emailAtext(code) : nok(code) + } + + function schemeOrEmailAtext(code) { + return code === codes.plusSign || + code === codes.dash || + code === codes.dot || + asciiAlphanumeric(code) + ? schemeInsideOrEmailAtext(code) + : emailAtext(code) + } + + function schemeInsideOrEmailAtext(code) { + if (code === codes.colon) { + effects.consume(code) + return urlInside + } + + if ( + (code === codes.plusSign || + code === codes.dash || + code === codes.dot || + asciiAlphanumeric(code)) && + size++ < constants.autolinkSchemeSizeMax + ) { + effects.consume(code) + return schemeInsideOrEmailAtext + } + + return emailAtext(code) + } + + function urlInside(code) { + if (code === codes.greaterThan) { + effects.exit(types.autolinkProtocol) + return end(code) + } + + if (code === codes.space || code === codes.lessThan || asciiControl(code)) { + return nok(code) + } + + effects.consume(code) + return urlInside + } + + function emailAtext(code) { + if (code === codes.atSign) { + effects.consume(code) + size = 0 + return emailAtSignOrDot + } + + if (asciiAtext(code)) { + effects.consume(code) + return emailAtext + } + + return nok(code) + } + + function emailAtSignOrDot(code) { + return asciiAlphanumeric(code) ? emailLabel(code) : nok(code) + } + + function emailLabel(code) { + if (code === codes.dot) { + effects.consume(code) + size = 0 + return emailAtSignOrDot + } + + if (code === codes.greaterThan) { + // Exit, then change the type. + effects.exit(types.autolinkProtocol).type = types.autolinkEmail + return end(code) + } + + return emailValue(code) + } + + function emailValue(code) { + if ( + (code === codes.dash || asciiAlphanumeric(code)) && + size++ < constants.autolinkDomainSizeMax + ) { + effects.consume(code) + return code === codes.dash ? 
emailValue : emailLabel + } + + return nok(code) + } + + function end(code) { + assert__default['default'].equal(code, codes.greaterThan, 'expected `>`') + effects.enter(types.autolinkMarker) + effects.consume(code) + effects.exit(types.autolinkMarker) + effects.exit(types.autolink) + return ok + } +} + +module.exports = autolink diff --git a/node_modules/micromark/lib/tokenize/autolink.mjs b/node_modules/micromark/lib/tokenize/autolink.mjs new file mode 100644 index 00000000..890cd6c4 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/autolink.mjs @@ -0,0 +1,138 @@ +var autolink = { + name: 'autolink', + tokenize: tokenizeAutolink +} +export default autolink + +import assert from 'assert' +import asciiAlpha from '../character/ascii-alpha.mjs' +import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs' +import asciiAtext from '../character/ascii-atext.mjs' +import asciiControl from '../character/ascii-control.mjs' +import codes from '../character/codes.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' + +function tokenizeAutolink(effects, ok, nok) { + var size = 1 + + return start + + function start(code) { + assert(code === codes.lessThan, 'expected `<`') + effects.enter(types.autolink) + effects.enter(types.autolinkMarker) + effects.consume(code) + effects.exit(types.autolinkMarker) + effects.enter(types.autolinkProtocol) + return open + } + + function open(code) { + if (asciiAlpha(code)) { + effects.consume(code) + return schemeOrEmailAtext + } + + return asciiAtext(code) ? emailAtext(code) : nok(code) + } + + function schemeOrEmailAtext(code) { + return code === codes.plusSign || + code === codes.dash || + code === codes.dot || + asciiAlphanumeric(code) + ? schemeInsideOrEmailAtext(code) + : emailAtext(code) + } + + function schemeInsideOrEmailAtext(code) { + if (code === codes.colon) { + effects.consume(code) + return urlInside + } + + if ( + (code === codes.plusSign || + code === codes.dash || + code === codes.dot || + asciiAlphanumeric(code)) && + size++ < constants.autolinkSchemeSizeMax + ) { + effects.consume(code) + return schemeInsideOrEmailAtext + } + + return emailAtext(code) + } + + function urlInside(code) { + if (code === codes.greaterThan) { + effects.exit(types.autolinkProtocol) + return end(code) + } + + if (code === codes.space || code === codes.lessThan || asciiControl(code)) { + return nok(code) + } + + effects.consume(code) + return urlInside + } + + function emailAtext(code) { + if (code === codes.atSign) { + effects.consume(code) + size = 0 + return emailAtSignOrDot + } + + if (asciiAtext(code)) { + effects.consume(code) + return emailAtext + } + + return nok(code) + } + + function emailAtSignOrDot(code) { + return asciiAlphanumeric(code) ? emailLabel(code) : nok(code) + } + + function emailLabel(code) { + if (code === codes.dot) { + effects.consume(code) + size = 0 + return emailAtSignOrDot + } + + if (code === codes.greaterThan) { + // Exit, then change the type. + effects.exit(types.autolinkProtocol).type = types.autolinkEmail + return end(code) + } + + return emailValue(code) + } + + function emailValue(code) { + if ( + (code === codes.dash || asciiAlphanumeric(code)) && + size++ < constants.autolinkDomainSizeMax + ) { + effects.consume(code) + return code === codes.dash ? 
emailValue : emailLabel + } + + return nok(code) + } + + function end(code) { + assert.equal(code, codes.greaterThan, 'expected `>`') + effects.enter(types.autolinkMarker) + effects.consume(code) + effects.exit(types.autolinkMarker) + effects.exit(types.autolink) + return ok + } +} diff --git a/node_modules/micromark/lib/tokenize/block-quote.js b/node_modules/micromark/lib/tokenize/block-quote.js new file mode 100644 index 00000000..66f58d07 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/block-quote.js @@ -0,0 +1,67 @@ +'use strict' + +var codes = require('../character/codes.js') +var markdownSpace = require('../character/markdown-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var factorySpace = require('./factory-space.js') + +var blockQuote = { + name: 'blockQuote', + tokenize: tokenizeBlockQuoteStart, + continuation: {tokenize: tokenizeBlockQuoteContinuation}, + exit: exit +} + +function tokenizeBlockQuoteStart(effects, ok, nok) { + var self = this + + return start + + function start(code) { + if (code === codes.greaterThan) { + if (!self.containerState.open) { + effects.enter(types.blockQuote, {_container: true}) + self.containerState.open = true + } + + effects.enter(types.blockQuotePrefix) + effects.enter(types.blockQuoteMarker) + effects.consume(code) + effects.exit(types.blockQuoteMarker) + return after + } + + return nok(code) + } + + function after(code) { + if (markdownSpace(code)) { + effects.enter(types.blockQuotePrefixWhitespace) + effects.consume(code) + effects.exit(types.blockQuotePrefixWhitespace) + effects.exit(types.blockQuotePrefix) + return ok + } + + effects.exit(types.blockQuotePrefix) + return ok(code) + } +} + +function tokenizeBlockQuoteContinuation(effects, ok, nok) { + return factorySpace( + effects, + effects.attempt(blockQuote, ok, nok), + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? 
undefined + : constants.tabSize + ) +} + +function exit(effects) { + effects.exit(types.blockQuote) +} + +module.exports = blockQuote diff --git a/node_modules/micromark/lib/tokenize/block-quote.mjs b/node_modules/micromark/lib/tokenize/block-quote.mjs new file mode 100644 index 00000000..cf215ba6 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/block-quote.mjs @@ -0,0 +1,64 @@ +var blockQuote = { + name: 'blockQuote', + tokenize: tokenizeBlockQuoteStart, + continuation: {tokenize: tokenizeBlockQuoteContinuation}, + exit: exit +} +export default blockQuote + +import codes from '../character/codes.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import spaceFactory from './factory-space.mjs' + +function tokenizeBlockQuoteStart(effects, ok, nok) { + var self = this + + return start + + function start(code) { + if (code === codes.greaterThan) { + if (!self.containerState.open) { + effects.enter(types.blockQuote, {_container: true}) + self.containerState.open = true + } + + effects.enter(types.blockQuotePrefix) + effects.enter(types.blockQuoteMarker) + effects.consume(code) + effects.exit(types.blockQuoteMarker) + return after + } + + return nok(code) + } + + function after(code) { + if (markdownSpace(code)) { + effects.enter(types.blockQuotePrefixWhitespace) + effects.consume(code) + effects.exit(types.blockQuotePrefixWhitespace) + effects.exit(types.blockQuotePrefix) + return ok + } + + effects.exit(types.blockQuotePrefix) + return ok(code) + } +} + +function tokenizeBlockQuoteContinuation(effects, ok, nok) { + return spaceFactory( + effects, + effects.attempt(blockQuote, ok, nok), + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + ) +} + +function exit(effects) { + effects.exit(types.blockQuote) +} diff --git a/node_modules/micromark/lib/tokenize/character-escape.js b/node_modules/micromark/lib/tokenize/character-escape.js new file mode 100644 index 00000000..2c796400 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/character-escape.js @@ -0,0 +1,44 @@ +'use strict' + +var assert = require('assert') +var asciiPunctuation = require('../character/ascii-punctuation.js') +var codes = require('../character/codes.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var characterEscape = { + name: 'characterEscape', + tokenize: tokenizeCharacterEscape +} + +function tokenizeCharacterEscape(effects, ok, nok) { + return start + + function start(code) { + assert__default['default'](code === codes.backslash, 'expected `\\`') + effects.enter(types.characterEscape) + effects.enter(types.escapeMarker) + effects.consume(code) + effects.exit(types.escapeMarker) + return open + } + + function open(code) { + if (asciiPunctuation(code)) { + effects.enter(types.characterEscapeValue) + effects.consume(code) + effects.exit(types.characterEscapeValue) + effects.exit(types.characterEscape) + return ok + } + + return nok(code) + } +} + +module.exports = characterEscape diff --git a/node_modules/micromark/lib/tokenize/character-escape.mjs b/node_modules/micromark/lib/tokenize/character-escape.mjs new file mode 100644 index 00000000..fae1f771 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/character-escape.mjs @@ -0,0 +1,35 @@ +var characterEscape = { + name: 'characterEscape', + tokenize: tokenizeCharacterEscape +} +export default characterEscape + +import assert from 'assert' +import asciiPunctuation from '../character/ascii-punctuation.mjs' +import codes from '../character/codes.mjs' +import types from '../constant/types.mjs' + +function tokenizeCharacterEscape(effects, ok, nok) { + return start + + function start(code) { + assert(code === codes.backslash, 'expected `\\`') + effects.enter(types.characterEscape) + effects.enter(types.escapeMarker) + effects.consume(code) + effects.exit(types.escapeMarker) + return open + } + + function open(code) { + if (asciiPunctuation(code)) { + effects.enter(types.characterEscapeValue) + effects.consume(code) + effects.exit(types.characterEscapeValue) + effects.exit(types.characterEscape) + return ok + } + + return nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/character-reference.js b/node_modules/micromark/lib/tokenize/character-reference.js new file mode 100644 index 00000000..0f3966c6 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/character-reference.js @@ -0,0 +1,101 @@ +'use strict' + +var assert = require('assert') +var decodeEntity = require('parse-entities/decode-entity.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var asciiDigit = require('../character/ascii-digit.js') +var asciiHexDigit = require('../character/ascii-hex-digit.js') +var codes = require('../character/codes.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) +var decodeEntity__default = /*#__PURE__*/ _interopDefaultLegacy(decodeEntity) + +var characterReference = { + name: 'characterReference', + tokenize: tokenizeCharacterReference +} + +function tokenizeCharacterReference(effects, ok, nok) { + var self = this + var size = 0 + var max + var test + + return start + + function start(code) { + assert__default['default'](code === codes.ampersand, 'expected `&`') + effects.enter(types.characterReference) + effects.enter(types.characterReferenceMarker) + effects.consume(code) + effects.exit(types.characterReferenceMarker) + return open + } + + function open(code) { + if (code === codes.numberSign) { + effects.enter(types.characterReferenceMarkerNumeric) + effects.consume(code) + effects.exit(types.characterReferenceMarkerNumeric) + return numeric + } + + effects.enter(types.characterReferenceValue) + max = constants.characterReferenceNamedSizeMax + test = asciiAlphanumeric + return value(code) + } + + function numeric(code) { + if (code === codes.uppercaseX || code === codes.lowercaseX) { + effects.enter(types.characterReferenceMarkerHexadecimal) + effects.consume(code) + effects.exit(types.characterReferenceMarkerHexadecimal) + effects.enter(types.characterReferenceValue) + max = constants.characterReferenceHexadecimalSizeMax + test = asciiHexDigit + return value + } + + effects.enter(types.characterReferenceValue) + max = constants.characterReferenceDecimalSizeMax + test = asciiDigit + return value(code) + } + + function value(code) { + var token + + if (code === codes.semicolon && size) { + token = effects.exit(types.characterReferenceValue) + + if ( + test === asciiAlphanumeric && + !decodeEntity__default['default'](self.sliceSerialize(token)) + ) { + return nok(code) + } + + effects.enter(types.characterReferenceMarker) + effects.consume(code) + effects.exit(types.characterReferenceMarker) + effects.exit(types.characterReference) + return ok + } + + if (test(code) && size++ < max) { + effects.consume(code) + return value + } + + return nok(code) + } +} + +module.exports = characterReference diff --git a/node_modules/micromark/lib/tokenize/character-reference.mjs b/node_modules/micromark/lib/tokenize/character-reference.mjs new file mode 100644 index 00000000..eb76075a --- /dev/null +++ b/node_modules/micromark/lib/tokenize/character-reference.mjs @@ -0,0 +1,88 @@ +var characterReference = { + name: 'characterReference', + tokenize: tokenizeCharacterReference +} +export default characterReference + +import assert from 'assert' +import decode from 'parse-entities/decode-entity.js' +import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs' +import asciiDigit from '../character/ascii-digit.mjs' +import asciiHexDigit from '../character/ascii-hex-digit.mjs' +import codes from '../character/codes.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' + +function tokenizeCharacterReference(effects, ok, nok) { + var self = this + var size = 0 + var max + var test + + return start + + function start(code) { + assert(code === codes.ampersand, 'expected `&`') + effects.enter(types.characterReference) + effects.enter(types.characterReferenceMarker) + effects.consume(code) + effects.exit(types.characterReferenceMarker) + return open + } + + function open(code) { + if (code === codes.numberSign) { + effects.enter(types.characterReferenceMarkerNumeric) + effects.consume(code) + 
effects.exit(types.characterReferenceMarkerNumeric) + return numeric + } + + effects.enter(types.characterReferenceValue) + max = constants.characterReferenceNamedSizeMax + test = asciiAlphanumeric + return value(code) + } + + function numeric(code) { + if (code === codes.uppercaseX || code === codes.lowercaseX) { + effects.enter(types.characterReferenceMarkerHexadecimal) + effects.consume(code) + effects.exit(types.characterReferenceMarkerHexadecimal) + effects.enter(types.characterReferenceValue) + max = constants.characterReferenceHexadecimalSizeMax + test = asciiHexDigit + return value + } + + effects.enter(types.characterReferenceValue) + max = constants.characterReferenceDecimalSizeMax + test = asciiDigit + return value(code) + } + + function value(code) { + var token + + if (code === codes.semicolon && size) { + token = effects.exit(types.characterReferenceValue) + + if (test === asciiAlphanumeric && !decode(self.sliceSerialize(token))) { + return nok(code) + } + + effects.enter(types.characterReferenceMarker) + effects.consume(code) + effects.exit(types.characterReferenceMarker) + effects.exit(types.characterReference) + return ok + } + + if (test(code) && size++ < max) { + effects.consume(code) + return value + } + + return nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/code-fenced.js b/node_modules/micromark/lib/tokenize/code-fenced.js new file mode 100644 index 00000000..f7358358 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/code-fenced.js @@ -0,0 +1,185 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var prefixSize = require('../util/prefix-size.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var codeFenced = { + name: 'codeFenced', + tokenize: tokenizeCodeFenced, + concrete: true +} + +function tokenizeCodeFenced(effects, ok, nok) { + var self = this + var closingFenceConstruct = {tokenize: tokenizeClosingFence, partial: true} + var initialPrefix = prefixSize(this.events, types.linePrefix) + var sizeOpen = 0 + var marker + + return start + + function start(code) { + assert__default['default']( + code === codes.graveAccent || code === codes.tilde, + 'expected `` ` `` or `~`' + ) + effects.enter(types.codeFenced) + effects.enter(types.codeFencedFence) + effects.enter(types.codeFencedFenceSequence) + marker = code + return sequenceOpen(code) + } + + function sequenceOpen(code) { + if (code === marker) { + effects.consume(code) + sizeOpen++ + return sequenceOpen + } + + effects.exit(types.codeFencedFenceSequence) + return sizeOpen < constants.codeFencedSequenceSizeMin + ? 
nok(code) + : factorySpace(effects, infoOpen, types.whitespace)(code) + } + + function infoOpen(code) { + if (code === codes.eof || markdownLineEnding(code)) { + return openAfter(code) + } + + effects.enter(types.codeFencedFenceInfo) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return info(code) + } + + function info(code) { + if (code === codes.eof || markdownLineEndingOrSpace(code)) { + effects.exit(types.chunkString) + effects.exit(types.codeFencedFenceInfo) + return factorySpace(effects, infoAfter, types.whitespace)(code) + } + + if (code === codes.graveAccent && code === marker) return nok(code) + effects.consume(code) + return info + } + + function infoAfter(code) { + if (code === codes.eof || markdownLineEnding(code)) { + return openAfter(code) + } + + effects.enter(types.codeFencedFenceMeta) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return meta(code) + } + + function meta(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.chunkString) + effects.exit(types.codeFencedFenceMeta) + return openAfter(code) + } + + if (code === codes.graveAccent && code === marker) return nok(code) + effects.consume(code) + return meta + } + + function openAfter(code) { + effects.exit(types.codeFencedFence) + return self.interrupt ? ok(code) : content(code) + } + + function content(code) { + if (code === codes.eof) { + return after(code) + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return effects.attempt( + closingFenceConstruct, + after, + initialPrefix + ? factorySpace(effects, content, types.linePrefix, initialPrefix + 1) + : content + ) + } + + effects.enter(types.codeFlowValue) + return contentContinue(code) + } + + function contentContinue(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.codeFlowValue) + return content(code) + } + + effects.consume(code) + return contentContinue + } + + function after(code) { + effects.exit(types.codeFenced) + return ok(code) + } + + function tokenizeClosingFence(effects, ok, nok) { + var size = 0 + + return factorySpace( + effects, + closingSequenceStart, + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? 
undefined + : constants.tabSize + ) + + function closingSequenceStart(code) { + effects.enter(types.codeFencedFence) + effects.enter(types.codeFencedFenceSequence) + return closingSequence(code) + } + + function closingSequence(code) { + if (code === marker) { + effects.consume(code) + size++ + return closingSequence + } + + if (size < sizeOpen) return nok(code) + effects.exit(types.codeFencedFenceSequence) + return factorySpace(effects, closingSequenceEnd, types.whitespace)(code) + } + + function closingSequenceEnd(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.codeFencedFence) + return ok(code) + } + + return nok(code) + } + } +} + +module.exports = codeFenced diff --git a/node_modules/micromark/lib/tokenize/code-fenced.mjs b/node_modules/micromark/lib/tokenize/code-fenced.mjs new file mode 100644 index 00000000..14f83b1f --- /dev/null +++ b/node_modules/micromark/lib/tokenize/code-fenced.mjs @@ -0,0 +1,176 @@ +var codeFenced = { + name: 'codeFenced', + tokenize: tokenizeCodeFenced, + concrete: true +} +export default codeFenced + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import prefixSize from '../util/prefix-size.mjs' +import spaceFactory from './factory-space.mjs' + +function tokenizeCodeFenced(effects, ok, nok) { + var self = this + var closingFenceConstruct = {tokenize: tokenizeClosingFence, partial: true} + var initialPrefix = prefixSize(this.events, types.linePrefix) + var sizeOpen = 0 + var marker + + return start + + function start(code) { + assert( + code === codes.graveAccent || code === codes.tilde, + 'expected `` ` `` or `~`' + ) + effects.enter(types.codeFenced) + effects.enter(types.codeFencedFence) + effects.enter(types.codeFencedFenceSequence) + marker = code + return sequenceOpen(code) + } + + function sequenceOpen(code) { + if (code === marker) { + effects.consume(code) + sizeOpen++ + return sequenceOpen + } + + effects.exit(types.codeFencedFenceSequence) + return sizeOpen < constants.codeFencedSequenceSizeMin + ? 
nok(code) + : spaceFactory(effects, infoOpen, types.whitespace)(code) + } + + function infoOpen(code) { + if (code === codes.eof || markdownLineEnding(code)) { + return openAfter(code) + } + + effects.enter(types.codeFencedFenceInfo) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return info(code) + } + + function info(code) { + if (code === codes.eof || markdownLineEndingOrSpace(code)) { + effects.exit(types.chunkString) + effects.exit(types.codeFencedFenceInfo) + return spaceFactory(effects, infoAfter, types.whitespace)(code) + } + + if (code === codes.graveAccent && code === marker) return nok(code) + effects.consume(code) + return info + } + + function infoAfter(code) { + if (code === codes.eof || markdownLineEnding(code)) { + return openAfter(code) + } + + effects.enter(types.codeFencedFenceMeta) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return meta(code) + } + + function meta(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.chunkString) + effects.exit(types.codeFencedFenceMeta) + return openAfter(code) + } + + if (code === codes.graveAccent && code === marker) return nok(code) + effects.consume(code) + return meta + } + + function openAfter(code) { + effects.exit(types.codeFencedFence) + return self.interrupt ? ok(code) : content(code) + } + + function content(code) { + if (code === codes.eof) { + return after(code) + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return effects.attempt( + closingFenceConstruct, + after, + initialPrefix + ? spaceFactory(effects, content, types.linePrefix, initialPrefix + 1) + : content + ) + } + + effects.enter(types.codeFlowValue) + return contentContinue(code) + } + + function contentContinue(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.codeFlowValue) + return content(code) + } + + effects.consume(code) + return contentContinue + } + + function after(code) { + effects.exit(types.codeFenced) + return ok(code) + } + + function tokenizeClosingFence(effects, ok, nok) { + var size = 0 + + return spaceFactory( + effects, + closingSequenceStart, + types.linePrefix, + this.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? 
undefined + : constants.tabSize + ) + + function closingSequenceStart(code) { + effects.enter(types.codeFencedFence) + effects.enter(types.codeFencedFenceSequence) + return closingSequence(code) + } + + function closingSequence(code) { + if (code === marker) { + effects.consume(code) + size++ + return closingSequence + } + + if (size < sizeOpen) return nok(code) + effects.exit(types.codeFencedFenceSequence) + return spaceFactory(effects, closingSequenceEnd, types.whitespace)(code) + } + + function closingSequenceEnd(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.codeFencedFence) + return ok(code) + } + + return nok(code) + } + } +} diff --git a/node_modules/micromark/lib/tokenize/code-indented.js b/node_modules/micromark/lib/tokenize/code-indented.js new file mode 100644 index 00000000..8725366d --- /dev/null +++ b/node_modules/micromark/lib/tokenize/code-indented.js @@ -0,0 +1,91 @@ +'use strict' + +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var chunkedSplice = require('../util/chunked-splice.js') +var prefixSize = require('../util/prefix-size.js') +var factorySpace = require('./factory-space.js') + +var codeIndented = { + name: 'codeIndented', + tokenize: tokenizeCodeIndented, + resolve: resolveCodeIndented +} + +var indentedContentConstruct = { + tokenize: tokenizeIndentedContent, + partial: true +} + +function resolveCodeIndented(events, context) { + var code = { + type: types.codeIndented, + start: events[0][1].start, + end: events[events.length - 1][1].end + } + + chunkedSplice(events, 0, 0, [['enter', code, context]]) + chunkedSplice(events, events.length, 0, [['exit', code, context]]) + + return events +} + +function tokenizeCodeIndented(effects, ok, nok) { + return effects.attempt(indentedContentConstruct, afterPrefix, nok) + + function afterPrefix(code) { + if (code === codes.eof) { + return ok(code) + } + + if (markdownLineEnding(code)) { + return effects.attempt(indentedContentConstruct, afterPrefix, ok)(code) + } + + effects.enter(types.codeFlowValue) + return content(code) + } + + function content(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.codeFlowValue) + return afterPrefix(code) + } + + effects.consume(code) + return content + } +} + +function tokenizeIndentedContent(effects, ok, nok) { + var self = this + + return factorySpace( + effects, + afterPrefix, + types.linePrefix, + constants.tabSize + 1 + ) + + function afterPrefix(code) { + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return factorySpace( + effects, + afterPrefix, + types.linePrefix, + constants.tabSize + 1 + ) + } + + return prefixSize(self.events, types.linePrefix) < constants.tabSize + ? 
nok(code) + : ok(code) + } +} + +module.exports = codeIndented diff --git a/node_modules/micromark/lib/tokenize/code-indented.mjs b/node_modules/micromark/lib/tokenize/code-indented.mjs new file mode 100644 index 00000000..91919141 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/code-indented.mjs @@ -0,0 +1,88 @@ +var codeIndented = { + name: 'codeIndented', + tokenize: tokenizeCodeIndented, + resolve: resolveCodeIndented +} +export default codeIndented + +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import chunkedSplice from '../util/chunked-splice.mjs' +import prefixSize from '../util/prefix-size.mjs' +import spaceFactory from './factory-space.mjs' + +var indentedContentConstruct = { + tokenize: tokenizeIndentedContent, + partial: true +} + +function resolveCodeIndented(events, context) { + var code = { + type: types.codeIndented, + start: events[0][1].start, + end: events[events.length - 1][1].end + } + + chunkedSplice(events, 0, 0, [['enter', code, context]]) + chunkedSplice(events, events.length, 0, [['exit', code, context]]) + + return events +} + +function tokenizeCodeIndented(effects, ok, nok) { + return effects.attempt(indentedContentConstruct, afterPrefix, nok) + + function afterPrefix(code) { + if (code === codes.eof) { + return ok(code) + } + + if (markdownLineEnding(code)) { + return effects.attempt(indentedContentConstruct, afterPrefix, ok)(code) + } + + effects.enter(types.codeFlowValue) + return content(code) + } + + function content(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.codeFlowValue) + return afterPrefix(code) + } + + effects.consume(code) + return content + } +} + +function tokenizeIndentedContent(effects, ok, nok) { + var self = this + + return spaceFactory( + effects, + afterPrefix, + types.linePrefix, + constants.tabSize + 1 + ) + + function afterPrefix(code) { + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return spaceFactory( + effects, + afterPrefix, + types.linePrefix, + constants.tabSize + 1 + ) + } + + return prefixSize(self.events, types.linePrefix) < constants.tabSize + ? nok(code) + : ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/code-text.js b/node_modules/micromark/lib/tokenize/code-text.js new file mode 100644 index 00000000..0eb1db81 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/code-text.js @@ -0,0 +1,191 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var codeText = { + name: 'codeText', + tokenize: tokenizeCodeText, + resolve: resolveCodeText, + previous: previous +} + +function resolveCodeText(events) { + var tailExitIndex = events.length - 4 + var headEnterIndex = 3 + var index + var enter + + // If we start and end with an EOL or a space. 
+ if ( + (events[headEnterIndex][1].type === types.lineEnding || + events[headEnterIndex][1].type === 'space') && + (events[tailExitIndex][1].type === types.lineEnding || + events[tailExitIndex][1].type === 'space') + ) { + index = headEnterIndex + + // And we have data. + while (++index < tailExitIndex) { + if (events[index][1].type === types.codeTextData) { + // Then we have padding. + events[tailExitIndex][1].type = events[headEnterIndex][1].type = + types.codeTextPadding + headEnterIndex += 2 + tailExitIndex -= 2 + break + } + } + } + + // Merge adjacent spaces and data. + index = headEnterIndex - 1 + tailExitIndex++ + + while (++index <= tailExitIndex) { + if (enter === undefined) { + if ( + index !== tailExitIndex && + events[index][1].type !== types.lineEnding + ) { + enter = index + } + } else if ( + index === tailExitIndex || + events[index][1].type === types.lineEnding + ) { + events[enter][1].type = types.codeTextData + + if (index !== enter + 2) { + events[enter][1].end = events[index - 1][1].end + events.splice(enter + 2, index - enter - 2) + tailExitIndex -= index - enter - 2 + index = enter + 2 + } + + enter = undefined + } + } + + return events +} + +function previous(code) { + // If there is a previous code, there will always be a tail. + return ( + code !== codes.graveAccent || + this.events[this.events.length - 1][1].type === types.characterEscape + ) +} + +function tokenizeCodeText(effects, ok, nok) { + var self = this + var sizeOpen = 0 + var size + var token + + return start + + function start(code) { + assert__default['default'](code === codes.graveAccent, 'expected `` ` ``') + assert__default['default']( + previous.call(self, self.previous), + 'expected correct previous' + ) + effects.enter(types.codeText) + effects.enter(types.codeTextSequence) + return openingSequence(code) + } + + function openingSequence(code) { + if (code === codes.graveAccent) { + effects.consume(code) + sizeOpen++ + return openingSequence + } + + effects.exit(types.codeTextSequence) + return gap(code) + } + + function gap(code) { + // EOF. + if (code === codes.eof) { + return nok(code) + } + + // Closing fence? + // Could also be data. + if (code === codes.graveAccent) { + token = effects.enter(types.codeTextSequence) + size = 0 + return closingSequence(code) + } + + // Tabs don’t work, and virtual spaces don’t make sense. + if (code === codes.space) { + effects.enter('space') + effects.consume(code) + effects.exit('space') + return gap + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return gap + } + + // Data. + effects.enter(types.codeTextData) + return data(code) + } + + // In code. + function data(code) { + if ( + code === codes.eof || + code === codes.space || + code === codes.graveAccent || + markdownLineEnding(code) + ) { + effects.exit(types.codeTextData) + return gap(code) + } + + effects.consume(code) + return data + } + + // Closing fence. + function closingSequence(code) { + // More. + if (code === codes.graveAccent) { + effects.consume(code) + size++ + return closingSequence + } + + // Done! + if (size === sizeOpen) { + effects.exit(types.codeTextSequence) + effects.exit(types.codeText) + return ok(code) + } + + // More or less accents: mark as data. 
+ token.type = types.codeTextData + return data(code) + } +} + +module.exports = codeText diff --git a/node_modules/micromark/lib/tokenize/code-text.mjs b/node_modules/micromark/lib/tokenize/code-text.mjs new file mode 100644 index 00000000..7c44b659 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/code-text.mjs @@ -0,0 +1,179 @@ +var codeText = { + name: 'codeText', + tokenize: tokenizeCodeText, + resolve: resolveCodeText, + previous: previous +} +export default codeText + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import types from '../constant/types.mjs' + +function resolveCodeText(events) { + var tailExitIndex = events.length - 4 + var headEnterIndex = 3 + var index + var enter + + // If we start and end with an EOL or a space. + if ( + (events[headEnterIndex][1].type === types.lineEnding || + events[headEnterIndex][1].type === 'space') && + (events[tailExitIndex][1].type === types.lineEnding || + events[tailExitIndex][1].type === 'space') + ) { + index = headEnterIndex + + // And we have data. + while (++index < tailExitIndex) { + if (events[index][1].type === types.codeTextData) { + // Then we have padding. + events[tailExitIndex][1].type = events[headEnterIndex][1].type = + types.codeTextPadding + headEnterIndex += 2 + tailExitIndex -= 2 + break + } + } + } + + // Merge adjacent spaces and data. + index = headEnterIndex - 1 + tailExitIndex++ + + while (++index <= tailExitIndex) { + if (enter === undefined) { + if ( + index !== tailExitIndex && + events[index][1].type !== types.lineEnding + ) { + enter = index + } + } else if ( + index === tailExitIndex || + events[index][1].type === types.lineEnding + ) { + events[enter][1].type = types.codeTextData + + if (index !== enter + 2) { + events[enter][1].end = events[index - 1][1].end + events.splice(enter + 2, index - enter - 2) + tailExitIndex -= index - enter - 2 + index = enter + 2 + } + + enter = undefined + } + } + + return events +} + +function previous(code) { + // If there is a previous code, there will always be a tail. + return ( + code !== codes.graveAccent || + this.events[this.events.length - 1][1].type === types.characterEscape + ) +} + +function tokenizeCodeText(effects, ok, nok) { + var self = this + var sizeOpen = 0 + var size + var token + + return start + + function start(code) { + assert(code === codes.graveAccent, 'expected `` ` ``') + assert(previous.call(self, self.previous), 'expected correct previous') + effects.enter(types.codeText) + effects.enter(types.codeTextSequence) + return openingSequence(code) + } + + function openingSequence(code) { + if (code === codes.graveAccent) { + effects.consume(code) + sizeOpen++ + return openingSequence + } + + effects.exit(types.codeTextSequence) + return gap(code) + } + + function gap(code) { + // EOF. + if (code === codes.eof) { + return nok(code) + } + + // Closing fence? + // Could also be data. + if (code === codes.graveAccent) { + token = effects.enter(types.codeTextSequence) + size = 0 + return closingSequence(code) + } + + // Tabs don’t work, and virtual spaces don’t make sense. + if (code === codes.space) { + effects.enter('space') + effects.consume(code) + effects.exit('space') + return gap + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return gap + } + + // Data. + effects.enter(types.codeTextData) + return data(code) + } + + // In code. 
+ function data(code) { + if ( + code === codes.eof || + code === codes.space || + code === codes.graveAccent || + markdownLineEnding(code) + ) { + effects.exit(types.codeTextData) + return gap(code) + } + + effects.consume(code) + return data + } + + // Closing fence. + function closingSequence(code) { + // More. + if (code === codes.graveAccent) { + effects.consume(code) + size++ + return closingSequence + } + + // Done! + if (size === sizeOpen) { + effects.exit(types.codeTextSequence) + effects.exit(types.codeText) + return ok(code) + } + + // More or less accents: mark as data. + token.type = types.codeTextData + return data(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/content.js b/node_modules/micromark/lib/tokenize/content.js new file mode 100644 index 00000000..cb763ec5 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/content.js @@ -0,0 +1,121 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var prefixSize = require('../util/prefix-size.js') +var subtokenize = require('../util/subtokenize.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +// No name because it must not be turned off. +var content = { + tokenize: tokenizeContent, + resolve: resolveContent, + interruptible: true, + lazy: true +} + +var continuationConstruct = {tokenize: tokenizeContinuation, partial: true} + +// Content is transparent: it’s parsed right now. That way, definitions are also +// parsed right now: before text in paragraphs (specifically, media) are parsed. +function resolveContent(events) { + subtokenize(events) + return events +} + +function tokenizeContent(effects, ok) { + var previous + + return start + + function start(code) { + assert__default['default']( + code !== codes.eof && !markdownLineEnding(code), + 'expected no eof or eol' + ) + + effects.enter(types.content) + previous = effects.enter(types.chunkContent, { + contentType: constants.contentTypeContent + }) + return data(code) + } + + function data(code) { + if (code === codes.eof) { + return contentEnd(code) + } + + if (markdownLineEnding(code)) { + return effects.check( + continuationConstruct, + contentContinue, + contentEnd + )(code) + } + + // Data. 
+ effects.consume(code) + return data + } + + function contentEnd(code) { + effects.exit(types.chunkContent) + effects.exit(types.content) + return ok(code) + } + + function contentContinue(code) { + assert__default['default'](markdownLineEnding(code), 'expected eol') + effects.consume(code) + effects.exit(types.chunkContent) + previous = previous.next = effects.enter(types.chunkContent, { + contentType: constants.contentTypeContent, + previous: previous + }) + return data + } +} + +function tokenizeContinuation(effects, ok, nok) { + var self = this + + return startLookahead + + function startLookahead(code) { + assert__default['default']( + markdownLineEnding(code), + 'expected a line ending' + ) + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return factorySpace(effects, prefixed, types.linePrefix) + } + + function prefixed(code) { + if (code === codes.eof || markdownLineEnding(code)) { + return nok(code) + } + + if ( + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 || + prefixSize(self.events, types.linePrefix) < constants.tabSize + ) { + return effects.interrupt(self.parser.constructs.flow, nok, ok)(code) + } + + return ok(code) + } +} + +module.exports = content diff --git a/node_modules/micromark/lib/tokenize/content.mjs b/node_modules/micromark/lib/tokenize/content.mjs new file mode 100644 index 00000000..ca9c2e15 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/content.mjs @@ -0,0 +1,109 @@ +// No name because it must not be turned off. +var content = { + tokenize: tokenizeContent, + resolve: resolveContent, + interruptible: true, + lazy: true +} +export default content + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import prefixSize from '../util/prefix-size.mjs' +import subtokenize from '../util/subtokenize.mjs' +import spaceFactory from './factory-space.mjs' + +var continuationConstruct = {tokenize: tokenizeContinuation, partial: true} + +// Content is transparent: it’s parsed right now. That way, definitions are also +// parsed right now: before text in paragraphs (specifically, media) are parsed. +function resolveContent(events) { + subtokenize(events) + return events +} + +function tokenizeContent(effects, ok) { + var previous + + return start + + function start(code) { + assert( + code !== codes.eof && !markdownLineEnding(code), + 'expected no eof or eol' + ) + + effects.enter(types.content) + previous = effects.enter(types.chunkContent, { + contentType: constants.contentTypeContent + }) + return data(code) + } + + function data(code) { + if (code === codes.eof) { + return contentEnd(code) + } + + if (markdownLineEnding(code)) { + return effects.check( + continuationConstruct, + contentContinue, + contentEnd + )(code) + } + + // Data. 
+ effects.consume(code) + return data + } + + function contentEnd(code) { + effects.exit(types.chunkContent) + effects.exit(types.content) + return ok(code) + } + + function contentContinue(code) { + assert(markdownLineEnding(code), 'expected eol') + effects.consume(code) + effects.exit(types.chunkContent) + previous = previous.next = effects.enter(types.chunkContent, { + contentType: constants.contentTypeContent, + previous: previous + }) + return data + } +} + +function tokenizeContinuation(effects, ok, nok) { + var self = this + + return startLookahead + + function startLookahead(code) { + assert(markdownLineEnding(code), 'expected a line ending') + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return spaceFactory(effects, prefixed, types.linePrefix) + } + + function prefixed(code) { + if (code === codes.eof || markdownLineEnding(code)) { + return nok(code) + } + + if ( + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 || + prefixSize(self.events, types.linePrefix) < constants.tabSize + ) { + return effects.interrupt(self.parser.constructs.flow, nok, ok)(code) + } + + return ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/definition.js b/node_modules/micromark/lib/tokenize/definition.js new file mode 100644 index 00000000..c4604d57 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/definition.js @@ -0,0 +1,129 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var types = require('../constant/types.js') +var normalizeIdentifier = require('../util/normalize-identifier.js') +var factoryDestination = require('./factory-destination.js') +var factoryLabel = require('./factory-label.js') +var factorySpace = require('./factory-space.js') +var factoryWhitespace = require('./factory-whitespace.js') +var factoryTitle = require('./factory-title.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var definition = { + name: 'definition', + tokenize: tokenizeDefinition +} + +var titleConstruct = {tokenize: tokenizeTitle, partial: true} + +function tokenizeDefinition(effects, ok, nok) { + var self = this + var identifier + + return start + + function start(code) { + assert__default['default'](code === codes.leftSquareBracket, 'expected `[`') + effects.enter(types.definition) + return factoryLabel.call( + self, + effects, + labelAfter, + nok, + types.definitionLabel, + types.definitionLabelMarker, + types.definitionLabelString + )(code) + } + + function labelAfter(code) { + identifier = normalizeIdentifier( + self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1) + ) + + if (code === codes.colon) { + effects.enter(types.definitionMarker) + effects.consume(code) + effects.exit(types.definitionMarker) + + // Note: blank lines can’t exist in content. 
+ return factoryWhitespace( + effects, + factoryDestination( + effects, + effects.attempt( + titleConstruct, + factorySpace(effects, after, types.whitespace), + factorySpace(effects, after, types.whitespace) + ), + nok, + types.definitionDestination, + types.definitionDestinationLiteral, + types.definitionDestinationLiteralMarker, + types.definitionDestinationRaw, + types.definitionDestinationString + ) + ) + } + + return nok(code) + } + + function after(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.definition) + + if (self.parser.defined.indexOf(identifier) < 0) { + self.parser.defined.push(identifier) + } + + return ok(code) + } + + return nok(code) + } +} + +function tokenizeTitle(effects, ok, nok) { + return start + + function start(code) { + return markdownLineEndingOrSpace(code) + ? factoryWhitespace(effects, before)(code) + : nok(code) + } + + function before(code) { + if ( + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.leftParenthesis + ) { + return factoryTitle( + effects, + factorySpace(effects, after, types.whitespace), + nok, + types.definitionTitle, + types.definitionTitleMarker, + types.definitionTitleString + )(code) + } + + return nok(code) + } + + function after(code) { + return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code) + } +} + +module.exports = definition diff --git a/node_modules/micromark/lib/tokenize/definition.mjs b/node_modules/micromark/lib/tokenize/definition.mjs new file mode 100644 index 00000000..5cc0dde8 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/definition.mjs @@ -0,0 +1,120 @@ +var definition = { + name: 'definition', + tokenize: tokenizeDefinition +} +export default definition + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import types from '../constant/types.mjs' +import normalizeIdentifier from '../util/normalize-identifier.mjs' +import destinationFactory from './factory-destination.mjs' +import labelFactory from './factory-label.mjs' +import spaceFactory from './factory-space.mjs' +import whitespaceFactory from './factory-whitespace.mjs' +import titleFactory from './factory-title.mjs' + +var titleConstruct = {tokenize: tokenizeTitle, partial: true} + +function tokenizeDefinition(effects, ok, nok) { + var self = this + var identifier + + return start + + function start(code) { + assert(code === codes.leftSquareBracket, 'expected `[`') + effects.enter(types.definition) + return labelFactory.call( + self, + effects, + labelAfter, + nok, + types.definitionLabel, + types.definitionLabelMarker, + types.definitionLabelString + )(code) + } + + function labelAfter(code) { + identifier = normalizeIdentifier( + self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1) + ) + + if (code === codes.colon) { + effects.enter(types.definitionMarker) + effects.consume(code) + effects.exit(types.definitionMarker) + + // Note: blank lines can’t exist in content. 
+ return whitespaceFactory( + effects, + destinationFactory( + effects, + effects.attempt( + titleConstruct, + spaceFactory(effects, after, types.whitespace), + spaceFactory(effects, after, types.whitespace) + ), + nok, + types.definitionDestination, + types.definitionDestinationLiteral, + types.definitionDestinationLiteralMarker, + types.definitionDestinationRaw, + types.definitionDestinationString + ) + ) + } + + return nok(code) + } + + function after(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.definition) + + if (self.parser.defined.indexOf(identifier) < 0) { + self.parser.defined.push(identifier) + } + + return ok(code) + } + + return nok(code) + } +} + +function tokenizeTitle(effects, ok, nok) { + return start + + function start(code) { + return markdownLineEndingOrSpace(code) + ? whitespaceFactory(effects, before)(code) + : nok(code) + } + + function before(code) { + if ( + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.leftParenthesis + ) { + return titleFactory( + effects, + spaceFactory(effects, after, types.whitespace), + nok, + types.definitionTitle, + types.definitionTitleMarker, + types.definitionTitleString + )(code) + } + + return nok(code) + } + + function after(code) { + return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/factory-destination.js b/node_modules/micromark/lib/tokenize/factory-destination.js new file mode 100644 index 00000000..d746cd01 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-destination.js @@ -0,0 +1,145 @@ +'use strict' + +var asciiControl = require('../character/ascii-control.js') +var codes = require('../character/codes.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') + +// eslint-disable-next-line max-params +function destinationFactory( + effects, + ok, + nok, + type, + literalType, + literalMarkerType, + rawType, + stringType, + max +) { + var limit = max || Infinity + var balance = 0 + + return start + + function start(code) { + if (code === codes.lessThan) { + effects.enter(type) + effects.enter(literalType) + effects.enter(literalMarkerType) + effects.consume(code) + effects.exit(literalMarkerType) + return destinationEnclosedBefore + } + + if (asciiControl(code) || code === codes.rightParenthesis) { + return nok(code) + } + + effects.enter(type) + effects.enter(rawType) + effects.enter(stringType) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return destinationRaw(code) + } + + function destinationEnclosedBefore(code) { + if (code === codes.greaterThan) { + effects.enter(literalMarkerType) + effects.consume(code) + effects.exit(literalMarkerType) + effects.exit(literalType) + effects.exit(type) + return ok + } + + effects.enter(stringType) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return destinationEnclosed(code) + } + + function destinationEnclosed(code) { + if (code === codes.greaterThan) { + effects.exit(types.chunkString) + effects.exit(stringType) + return destinationEnclosedBefore(code) + } + + if ( + code === codes.eof || + code === codes.lessThan || + markdownLineEnding(code) + ) { + return nok(code) + } + + effects.consume(code) + return code === codes.backslash + ? 
destinationEnclosedEscape + : destinationEnclosed + } + + function destinationEnclosedEscape(code) { + if ( + code === codes.lessThan || + code === codes.greaterThan || + code === codes.backslash + ) { + effects.consume(code) + return destinationEnclosed + } + + return destinationEnclosed(code) + } + + function destinationRaw(code) { + if (code === codes.leftParenthesis) { + if (++balance > limit) return nok(code) + effects.consume(code) + return destinationRaw + } + + if (code === codes.rightParenthesis) { + if (!balance--) { + effects.exit(types.chunkString) + effects.exit(stringType) + effects.exit(rawType) + effects.exit(type) + return ok(code) + } + + effects.consume(code) + return destinationRaw + } + + if (code === codes.eof || markdownLineEndingOrSpace(code)) { + if (balance) return nok(code) + effects.exit(types.chunkString) + effects.exit(stringType) + effects.exit(rawType) + effects.exit(type) + return ok(code) + } + + if (asciiControl(code)) return nok(code) + effects.consume(code) + return code === codes.backslash ? destinationRawEscape : destinationRaw + } + + function destinationRawEscape(code) { + if ( + code === codes.leftParenthesis || + code === codes.rightParenthesis || + code === codes.backslash + ) { + effects.consume(code) + return destinationRaw + } + + return destinationRaw(code) + } +} + +module.exports = destinationFactory diff --git a/node_modules/micromark/lib/tokenize/factory-destination.mjs b/node_modules/micromark/lib/tokenize/factory-destination.mjs new file mode 100644 index 00000000..be8cf2bd --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-destination.mjs @@ -0,0 +1,143 @@ +export default destinationFactory + +import asciiControl from '../character/ascii-control.mjs' +import codes from '../character/codes.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' + +// eslint-disable-next-line max-params +function destinationFactory( + effects, + ok, + nok, + type, + literalType, + literalMarkerType, + rawType, + stringType, + max +) { + var limit = max || Infinity + var balance = 0 + + return start + + function start(code) { + if (code === codes.lessThan) { + effects.enter(type) + effects.enter(literalType) + effects.enter(literalMarkerType) + effects.consume(code) + effects.exit(literalMarkerType) + return destinationEnclosedBefore + } + + if (asciiControl(code) || code === codes.rightParenthesis) { + return nok(code) + } + + effects.enter(type) + effects.enter(rawType) + effects.enter(stringType) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return destinationRaw(code) + } + + function destinationEnclosedBefore(code) { + if (code === codes.greaterThan) { + effects.enter(literalMarkerType) + effects.consume(code) + effects.exit(literalMarkerType) + effects.exit(literalType) + effects.exit(type) + return ok + } + + effects.enter(stringType) + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return destinationEnclosed(code) + } + + function destinationEnclosed(code) { + if (code === codes.greaterThan) { + effects.exit(types.chunkString) + effects.exit(stringType) + return destinationEnclosedBefore(code) + } + + if ( + code === codes.eof || + code === codes.lessThan || + markdownLineEnding(code) + ) { + return nok(code) + } + + effects.consume(code) + return code === 
codes.backslash + ? destinationEnclosedEscape + : destinationEnclosed + } + + function destinationEnclosedEscape(code) { + if ( + code === codes.lessThan || + code === codes.greaterThan || + code === codes.backslash + ) { + effects.consume(code) + return destinationEnclosed + } + + return destinationEnclosed(code) + } + + function destinationRaw(code) { + if (code === codes.leftParenthesis) { + if (++balance > limit) return nok(code) + effects.consume(code) + return destinationRaw + } + + if (code === codes.rightParenthesis) { + if (!balance--) { + effects.exit(types.chunkString) + effects.exit(stringType) + effects.exit(rawType) + effects.exit(type) + return ok(code) + } + + effects.consume(code) + return destinationRaw + } + + if (code === codes.eof || markdownLineEndingOrSpace(code)) { + if (balance) return nok(code) + effects.exit(types.chunkString) + effects.exit(stringType) + effects.exit(rawType) + effects.exit(type) + return ok(code) + } + + if (asciiControl(code)) return nok(code) + effects.consume(code) + return code === codes.backslash ? destinationRawEscape : destinationRaw + } + + function destinationRawEscape(code) { + if ( + code === codes.leftParenthesis || + code === codes.rightParenthesis || + code === codes.backslash + ) { + effects.consume(code) + return destinationRaw + } + + return destinationRaw(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/factory-label.js b/node_modules/micromark/lib/tokenize/factory-label.js new file mode 100644 index 00000000..64d96d78 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-label.js @@ -0,0 +1,102 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownSpace = require('../character/markdown-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +// eslint-disable-next-line max-params +function labelFactory(effects, ok, nok, type, markerType, stringType) { + var self = this + var size = 0 + var data + + return start + + function start(code) { + assert__default['default'](code === codes.leftSquareBracket, 'expected `[`') + effects.enter(type) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.enter(stringType) + return atBreak + } + + function atBreak(code) { + if ( + code === codes.eof || + code === codes.leftSquareBracket || + (code === codes.rightSquareBracket && !data) || + /* c8 ignore next */ + (code === codes.caret && + /* c8 ignore next */ + !size && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs) || + size > constants.linkReferenceSizeMax + ) { + return nok(code) + } + + if (code === codes.rightSquareBracket) { + effects.exit(stringType) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.exit(type) + return ok + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return atBreak + } + + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return label(code) + } + + function label(code) { + if ( + code === codes.eof || + code === codes.leftSquareBracket || + code === codes.rightSquareBracket || + markdownLineEnding(code) || + size++ > constants.linkReferenceSizeMax + ) { + effects.exit(types.chunkString) + return atBreak(code) + } + + effects.consume(code) + data = data || !markdownSpace(code) + return code === codes.backslash ? labelEscape : label + } + + function labelEscape(code) { + if ( + code === codes.leftSquareBracket || + code === codes.backslash || + code === codes.rightSquareBracket + ) { + effects.consume(code) + size++ + return label + } + + return label(code) + } +} + +module.exports = labelFactory diff --git a/node_modules/micromark/lib/tokenize/factory-label.mjs b/node_modules/micromark/lib/tokenize/factory-label.mjs new file mode 100644 index 00000000..eccdbd5b --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-label.mjs @@ -0,0 +1,94 @@ +export default labelFactory + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' + +// eslint-disable-next-line max-params +function labelFactory(effects, ok, nok, type, markerType, stringType) { + var self = this + var size = 0 + var data + + return start + + function start(code) { + assert(code === codes.leftSquareBracket, 'expected `[`') + effects.enter(type) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.enter(stringType) + return atBreak + } + + function atBreak(code) { + if ( + code === codes.eof || + code === codes.leftSquareBracket || + (code === codes.rightSquareBracket && !data) || + /* c8 ignore next */ + (code === codes.caret && + /* c8 ignore next */ + !size && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs) || + size > constants.linkReferenceSizeMax + ) { + return nok(code) + } + + if (code === codes.rightSquareBracket) { + effects.exit(stringType) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + 
effects.exit(type) + return ok + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return atBreak + } + + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return label(code) + } + + function label(code) { + if ( + code === codes.eof || + code === codes.leftSquareBracket || + code === codes.rightSquareBracket || + markdownLineEnding(code) || + size++ > constants.linkReferenceSizeMax + ) { + effects.exit(types.chunkString) + return atBreak(code) + } + + effects.consume(code) + data = data || !markdownSpace(code) + return code === codes.backslash ? labelEscape : label + } + + function labelEscape(code) { + if ( + code === codes.leftSquareBracket || + code === codes.backslash || + code === codes.rightSquareBracket + ) { + effects.consume(code) + size++ + return label + } + + return label(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/factory-space.js b/node_modules/micromark/lib/tokenize/factory-space.js new file mode 100644 index 00000000..d907c5dc --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-space.js @@ -0,0 +1,31 @@ +'use strict' + +var markdownSpace = require('../character/markdown-space.js') + +function spaceFactory(effects, ok, type, max) { + var limit = max ? max - 1 : Infinity + var size = 0 + + return start + + function start(code) { + if (markdownSpace(code)) { + effects.enter(type) + return prefix(code) + } + + return ok(code) + } + + function prefix(code) { + if (markdownSpace(code) && size++ < limit) { + effects.consume(code) + return prefix + } + + effects.exit(type) + return ok(code) + } +} + +module.exports = spaceFactory diff --git a/node_modules/micromark/lib/tokenize/factory-space.mjs b/node_modules/micromark/lib/tokenize/factory-space.mjs new file mode 100644 index 00000000..9668400d --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-space.mjs @@ -0,0 +1,29 @@ +export default spaceFactory + +import markdownSpace from '../character/markdown-space.mjs' + +function spaceFactory(effects, ok, type, max) { + var limit = max ? max - 1 : Infinity + var size = 0 + + return start + + function start(code) { + if (markdownSpace(code)) { + effects.enter(type) + return prefix(code) + } + + return ok(code) + } + + function prefix(code) { + if (markdownSpace(code) && size++ < limit) { + effects.consume(code) + return prefix + } + + effects.exit(type) + return ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/factory-title.js b/node_modules/micromark/lib/tokenize/factory-title.js new file mode 100644 index 00000000..a5d6349b --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-title.js @@ -0,0 +1,92 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +// eslint-disable-next-line max-params +function titleFactory(effects, ok, nok, type, markerType, stringType) { + var marker + + return start + + function start(code) { + assert__default['default']( + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.leftParenthesis, + 'expected `"`, `\'`, or `(`' + ) + effects.enter(type) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + marker = code === codes.leftParenthesis ? codes.rightParenthesis : code + return atFirstTitleBreak + } + + function atFirstTitleBreak(code) { + if (code === marker) { + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.exit(type) + return ok + } + + effects.enter(stringType) + return atTitleBreak(code) + } + + function atTitleBreak(code) { + if (code === marker) { + effects.exit(stringType) + return atFirstTitleBreak(marker) + } + + if (code === codes.eof) { + return nok(code) + } + + // Note: blank lines can’t exist in content. + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return factorySpace(effects, atTitleBreak, types.linePrefix) + } + + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return title(code) + } + + function title(code) { + if (code === marker || code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.chunkString) + return atTitleBreak(code) + } + + effects.consume(code) + return code === codes.backslash ? titleEscape : title + } + + function titleEscape(code) { + if (code === marker || code === codes.backslash) { + effects.consume(code) + return title + } + + return title(code) + } +} + +module.exports = titleFactory diff --git a/node_modules/micromark/lib/tokenize/factory-title.mjs b/node_modules/micromark/lib/tokenize/factory-title.mjs new file mode 100644 index 00000000..5ac4405e --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-title.mjs @@ -0,0 +1,84 @@ +export default titleFactory + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import spaceFactory from './factory-space.mjs' + +// eslint-disable-next-line max-params +function titleFactory(effects, ok, nok, type, markerType, stringType) { + var marker + + return start + + function start(code) { + assert( + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.leftParenthesis, + 'expected `"`, `\'`, or `(`' + ) + effects.enter(type) + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + marker = code === codes.leftParenthesis ? codes.rightParenthesis : code + return atFirstTitleBreak + } + + function atFirstTitleBreak(code) { + if (code === marker) { + effects.enter(markerType) + effects.consume(code) + effects.exit(markerType) + effects.exit(type) + return ok + } + + effects.enter(stringType) + return atTitleBreak(code) + } + + function atTitleBreak(code) { + if (code === marker) { + effects.exit(stringType) + return atFirstTitleBreak(marker) + } + + if (code === codes.eof) { + return nok(code) + } + + // Note: blank lines can’t exist in content. 
+ if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return spaceFactory(effects, atTitleBreak, types.linePrefix) + } + + effects.enter(types.chunkString, {contentType: constants.contentTypeString}) + return title(code) + } + + function title(code) { + if (code === marker || code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.chunkString) + return atTitleBreak(code) + } + + effects.consume(code) + return code === codes.backslash ? titleEscape : title + } + + function titleEscape(code) { + if (code === marker || code === codes.backslash) { + effects.consume(code) + return title + } + + return title(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/factory-whitespace.js b/node_modules/micromark/lib/tokenize/factory-whitespace.js new file mode 100644 index 00000000..ae0ce966 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-whitespace.js @@ -0,0 +1,34 @@ +'use strict' + +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownSpace = require('../character/markdown-space.js') +var types = require('../constant/types.js') +var factorySpace = require('./factory-space.js') + +function whitespaceFactory(effects, ok) { + var seen + + return start + + function start(code) { + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + seen = true + return start + } + + if (markdownSpace(code)) { + return factorySpace( + effects, + start, + seen ? types.linePrefix : types.lineSuffix + )(code) + } + + return ok(code) + } +} + +module.exports = whitespaceFactory diff --git a/node_modules/micromark/lib/tokenize/factory-whitespace.mjs b/node_modules/micromark/lib/tokenize/factory-whitespace.mjs new file mode 100644 index 00000000..8bea8fd2 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/factory-whitespace.mjs @@ -0,0 +1,32 @@ +export default whitespaceFactory + +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import types from '../constant/types.mjs' +import spaceFactory from './factory-space.mjs' + +function whitespaceFactory(effects, ok) { + var seen + + return start + + function start(code) { + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + seen = true + return start + } + + if (markdownSpace(code)) { + return spaceFactory( + effects, + start, + seen ? types.linePrefix : types.lineSuffix + )(code) + } + + return ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/hard-break-escape.js b/node_modules/micromark/lib/tokenize/hard-break-escape.js new file mode 100644 index 00000000..38955eca --- /dev/null +++ b/node_modules/micromark/lib/tokenize/hard-break-escape.js @@ -0,0 +1,41 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var hardBreakEscape = { + name: 'hardBreakEscape', + tokenize: tokenizeHardBreakEscape +} + +function tokenizeHardBreakEscape(effects, ok, nok) { + return start + + function start(code) { + assert__default['default'](code === codes.backslash, 'expected `\\`') + effects.enter(types.hardBreakEscape) + effects.enter(types.escapeMarker) + effects.consume(code) + return open + } + + function open(code) { + if (markdownLineEnding(code)) { + effects.exit(types.escapeMarker) + effects.exit(types.hardBreakEscape) + return ok(code) + } + + return nok(code) + } +} + +module.exports = hardBreakEscape diff --git a/node_modules/micromark/lib/tokenize/hard-break-escape.mjs b/node_modules/micromark/lib/tokenize/hard-break-escape.mjs new file mode 100644 index 00000000..0b23062d --- /dev/null +++ b/node_modules/micromark/lib/tokenize/hard-break-escape.mjs @@ -0,0 +1,32 @@ +var hardBreakEscape = { + name: 'hardBreakEscape', + tokenize: tokenizeHardBreakEscape +} +export default hardBreakEscape + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import types from '../constant/types.mjs' + +function tokenizeHardBreakEscape(effects, ok, nok) { + return start + + function start(code) { + assert(code === codes.backslash, 'expected `\\`') + effects.enter(types.hardBreakEscape) + effects.enter(types.escapeMarker) + effects.consume(code) + return open + } + + function open(code) { + if (markdownLineEnding(code)) { + effects.exit(types.escapeMarker) + effects.exit(types.hardBreakEscape) + return ok(code) + } + + return nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/heading-atx.js b/node_modules/micromark/lib/tokenize/heading-atx.js new file mode 100644 index 00000000..a3bfd060 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/heading-atx.js @@ -0,0 +1,151 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownSpace = require('../character/markdown-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var chunkedSplice = require('../util/chunked-splice.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var headingAtx = { + name: 'headingAtx', + tokenize: tokenizeHeadingAtx, + resolve: resolveHeadingAtx +} + +function resolveHeadingAtx(events, context) { + var contentEnd = events.length - 2 + var contentStart = 3 + var content + var text + + // Prefix whitespace, part of the opening. + if (events[contentStart][1].type === types.whitespace) { + contentStart += 2 + } + + // Suffix whitespace, part of the closing. + if ( + contentEnd - 2 > contentStart && + events[contentEnd][1].type === types.whitespace + ) { + contentEnd -= 2 + } + + if ( + events[contentEnd][1].type === types.atxHeadingSequence && + (contentStart === contentEnd - 1 || + (contentEnd - 4 > contentStart && + events[contentEnd - 2][1].type === types.whitespace)) + ) { + contentEnd -= contentStart + 1 === contentEnd ? 
2 : 4 + } + + if (contentEnd > contentStart) { + content = { + type: types.atxHeadingText, + start: events[contentStart][1].start, + end: events[contentEnd][1].end + } + text = { + type: types.chunkText, + start: events[contentStart][1].start, + end: events[contentEnd][1].end, + contentType: constants.contentTypeText + } + + chunkedSplice(events, contentStart, contentEnd - contentStart + 1, [ + ['enter', content, context], + ['enter', text, context], + ['exit', text, context], + ['exit', content, context] + ]) + } + + return events +} + +function tokenizeHeadingAtx(effects, ok, nok) { + var self = this + var size = 0 + + return start + + function start(code) { + assert__default['default'](code === codes.numberSign, 'expected `#`') + effects.enter(types.atxHeading) + effects.enter(types.atxHeadingSequence) + return fenceOpenInside(code) + } + + function fenceOpenInside(code) { + if ( + code === codes.numberSign && + size++ < constants.atxHeadingOpeningFenceSizeMax + ) { + effects.consume(code) + return fenceOpenInside + } + + if (code === codes.eof || markdownLineEndingOrSpace(code)) { + effects.exit(types.atxHeadingSequence) + return self.interrupt ? ok(code) : headingBreak(code) + } + + return nok(code) + } + + function headingBreak(code) { + if (code === codes.numberSign) { + effects.enter(types.atxHeadingSequence) + return sequence(code) + } + + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.atxHeading) + return ok(code) + } + + if (markdownSpace(code)) { + return factorySpace(effects, headingBreak, types.whitespace)(code) + } + + effects.enter(types.atxHeadingText) + return data(code) + } + + function sequence(code) { + if (code === codes.numberSign) { + effects.consume(code) + return sequence + } + + effects.exit(types.atxHeadingSequence) + return headingBreak(code) + } + + function data(code) { + if ( + code === codes.eof || + code === codes.numberSign || + markdownLineEndingOrSpace(code) + ) { + effects.exit(types.atxHeadingText) + return headingBreak(code) + } + + effects.consume(code) + return data + } +} + +module.exports = headingAtx diff --git a/node_modules/micromark/lib/tokenize/heading-atx.mjs b/node_modules/micromark/lib/tokenize/heading-atx.mjs new file mode 100644 index 00000000..1a5ed07f --- /dev/null +++ b/node_modules/micromark/lib/tokenize/heading-atx.mjs @@ -0,0 +1,142 @@ +var headingAtx = { + name: 'headingAtx', + tokenize: tokenizeHeadingAtx, + resolve: resolveHeadingAtx +} +export default headingAtx + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import chunkedSplice from '../util/chunked-splice.mjs' +import spaceFactory from './factory-space.mjs' + +function resolveHeadingAtx(events, context) { + var contentEnd = events.length - 2 + var contentStart = 3 + var content + var text + + // Prefix whitespace, part of the opening. + if (events[contentStart][1].type === types.whitespace) { + contentStart += 2 + } + + // Suffix whitespace, part of the closing. 
+ if ( + contentEnd - 2 > contentStart && + events[contentEnd][1].type === types.whitespace + ) { + contentEnd -= 2 + } + + if ( + events[contentEnd][1].type === types.atxHeadingSequence && + (contentStart === contentEnd - 1 || + (contentEnd - 4 > contentStart && + events[contentEnd - 2][1].type === types.whitespace)) + ) { + contentEnd -= contentStart + 1 === contentEnd ? 2 : 4 + } + + if (contentEnd > contentStart) { + content = { + type: types.atxHeadingText, + start: events[contentStart][1].start, + end: events[contentEnd][1].end + } + text = { + type: types.chunkText, + start: events[contentStart][1].start, + end: events[contentEnd][1].end, + contentType: constants.contentTypeText + } + + chunkedSplice(events, contentStart, contentEnd - contentStart + 1, [ + ['enter', content, context], + ['enter', text, context], + ['exit', text, context], + ['exit', content, context] + ]) + } + + return events +} + +function tokenizeHeadingAtx(effects, ok, nok) { + var self = this + var size = 0 + + return start + + function start(code) { + assert(code === codes.numberSign, 'expected `#`') + effects.enter(types.atxHeading) + effects.enter(types.atxHeadingSequence) + return fenceOpenInside(code) + } + + function fenceOpenInside(code) { + if ( + code === codes.numberSign && + size++ < constants.atxHeadingOpeningFenceSizeMax + ) { + effects.consume(code) + return fenceOpenInside + } + + if (code === codes.eof || markdownLineEndingOrSpace(code)) { + effects.exit(types.atxHeadingSequence) + return self.interrupt ? ok(code) : headingBreak(code) + } + + return nok(code) + } + + function headingBreak(code) { + if (code === codes.numberSign) { + effects.enter(types.atxHeadingSequence) + return sequence(code) + } + + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.atxHeading) + return ok(code) + } + + if (markdownSpace(code)) { + return spaceFactory(effects, headingBreak, types.whitespace)(code) + } + + effects.enter(types.atxHeadingText) + return data(code) + } + + function sequence(code) { + if (code === codes.numberSign) { + effects.consume(code) + return sequence + } + + effects.exit(types.atxHeadingSequence) + return headingBreak(code) + } + + function data(code) { + if ( + code === codes.eof || + code === codes.numberSign || + markdownLineEndingOrSpace(code) + ) { + effects.exit(types.atxHeadingText) + return headingBreak(code) + } + + effects.consume(code) + return data + } +} diff --git a/node_modules/micromark/lib/tokenize/html-flow.js b/node_modules/micromark/lib/tokenize/html-flow.js new file mode 100644 index 00000000..c6a894ff --- /dev/null +++ b/node_modules/micromark/lib/tokenize/html-flow.js @@ -0,0 +1,513 @@ +'use strict' + +var assert = require('assert') +var asciiAlpha = require('../character/ascii-alpha.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownSpace = require('../character/markdown-space.js') +var constants = require('../constant/constants.js') +var fromCharCode = require('../constant/from-char-code.js') +var htmlBlockNames = require('../constant/html-block-names.js') +var htmlRawNames = require('../constant/html-raw-names.js') +var types = require('../constant/types.js') +var partialBlankLine = require('./partial-blank-line.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 
'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var htmlFlow = { + name: 'htmlFlow', + tokenize: tokenizeHtmlFlow, + resolveTo: resolveToHtmlFlow, + concrete: true +} + +var nextBlankConstruct = {tokenize: tokenizeNextBlank, partial: true} + +function resolveToHtmlFlow(events) { + var index = events.length + + while (index--) { + if ( + events[index][0] === 'enter' && + events[index][1].type === types.htmlFlow + ) { + break + } + } + + if (index > 1 && events[index - 2][1].type === types.linePrefix) { + // Add the prefix start to the HTML token. + events[index][1].start = events[index - 2][1].start + // Add the prefix start to the HTML line token. + events[index + 1][1].start = events[index - 2][1].start + // Remove the line prefix. + events.splice(index - 2, 2) + } + + return events +} + +function tokenizeHtmlFlow(effects, ok, nok) { + var self = this + var kind + var startTag + var buffer + var index + var marker + + return start + + function start(code) { + assert__default['default'](code === codes.lessThan, 'expected `<`') + effects.enter(types.htmlFlow) + effects.enter(types.htmlFlowData) + effects.consume(code) + return open + } + + function open(code) { + if (code === codes.exclamationMark) { + effects.consume(code) + return declarationStart + } + + if (code === codes.slash) { + effects.consume(code) + return tagCloseStart + } + + if (code === codes.questionMark) { + effects.consume(code) + kind = constants.htmlInstruction + // While we’re in an instruction instead of a declaration, we’re on a `?` + // right now, so we do need to search for `>`, similar to declarations. + return self.interrupt ? ok : continuationDeclarationInside + } + + if (asciiAlpha(code)) { + effects.consume(code) + buffer = fromCharCode(code) + startTag = true + return tagName + } + + return nok(code) + } + + function declarationStart(code) { + if (code === codes.dash) { + effects.consume(code) + kind = constants.htmlComment + return commentOpenInside + } + + if (code === codes.leftSquareBracket) { + effects.consume(code) + kind = constants.htmlCdata + buffer = constants.cdataOpeningString + index = 0 + return cdataOpenInside + } + + if (asciiAlpha(code)) { + effects.consume(code) + kind = constants.htmlDeclaration + return self.interrupt ? ok : continuationDeclarationInside + } + + return nok(code) + } + + function commentOpenInside(code) { + if (code === codes.dash) { + effects.consume(code) + return self.interrupt ? ok : continuationDeclarationInside + } + + return nok(code) + } + + function cdataOpenInside(code) { + if (code === buffer.charCodeAt(index++)) { + effects.consume(code) + return index === buffer.length + ? self.interrupt + ? ok + : continuation + : cdataOpenInside + } + + return nok(code) + } + + function tagCloseStart(code) { + if (asciiAlpha(code)) { + effects.consume(code) + buffer = fromCharCode(code) + return tagName + } + + return nok(code) + } + + function tagName(code) { + if ( + code === codes.eof || + code === codes.slash || + code === codes.greaterThan || + markdownLineEndingOrSpace(code) + ) { + if ( + code !== codes.slash && + startTag && + htmlRawNames.indexOf(buffer.toLowerCase()) > -1 + ) { + kind = constants.htmlRaw + return self.interrupt ? ok(code) : continuation(code) + } + + if (htmlBlockNames.indexOf(buffer.toLowerCase()) > -1) { + kind = constants.htmlBasic + + if (code === codes.slash) { + effects.consume(code) + return basicSelfClosing + } + + return self.interrupt ? 
ok(code) : continuation(code) + } + + kind = constants.htmlComplete + // Do not support complete HTML when interrupting. + return self.interrupt + ? nok(code) + : startTag + ? completeAttributeNameBefore(code) + : completeClosingTagAfter(code) + } + + if (code === codes.dash || asciiAlphanumeric(code)) { + effects.consume(code) + buffer += fromCharCode(code) + return tagName + } + + return nok(code) + } + + function basicSelfClosing(code) { + if (code === codes.greaterThan) { + effects.consume(code) + return self.interrupt ? ok : continuation + } + + return nok(code) + } + + function completeClosingTagAfter(code) { + if (markdownSpace(code)) { + effects.consume(code) + return completeClosingTagAfter + } + + return completeEnd(code) + } + + function completeAttributeNameBefore(code) { + if (code === codes.slash) { + effects.consume(code) + return completeEnd + } + + if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) { + effects.consume(code) + return completeAttributeName + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeNameBefore + } + + return completeEnd(code) + } + + function completeAttributeName(code) { + if ( + code === codes.dash || + code === codes.dot || + code === codes.colon || + code === codes.underscore || + asciiAlphanumeric(code) + ) { + effects.consume(code) + return completeAttributeName + } + + return completeAttributeNameAfter(code) + } + + function completeAttributeNameAfter(code) { + if (code === codes.equalsTo) { + effects.consume(code) + return completeAttributeValueBefore + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeNameAfter + } + + return completeAttributeNameBefore(code) + } + + function completeAttributeValueBefore(code) { + if ( + code === codes.eof || + code === codes.lessThan || + code === codes.equalsTo || + code === codes.greaterThan || + code === codes.graveAccent + ) { + return nok(code) + } + + if (code === codes.quotationMark || code === codes.apostrophe) { + effects.consume(code) + marker = code + return completeAttributeValueQuoted + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeValueBefore + } + + marker = undefined + return completeAttributeValueUnquoted(code) + } + + function completeAttributeValueQuoted(code) { + if (code === marker) { + effects.consume(code) + return completeAttributeValueQuotedAfter + } + + if (code === codes.eof || markdownLineEnding(code)) { + return nok(code) + } + + effects.consume(code) + return completeAttributeValueQuoted + } + + function completeAttributeValueUnquoted(code) { + if ( + code === codes.eof || + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.lessThan || + code === codes.equalsTo || + code === codes.greaterThan || + code === codes.graveAccent || + markdownLineEndingOrSpace(code) + ) { + return completeAttributeNameAfter(code) + } + + effects.consume(code) + return completeAttributeValueUnquoted + } + + function completeAttributeValueQuotedAfter(code) { + if ( + code === codes.slash || + code === codes.greaterThan || + markdownSpace(code) + ) { + return completeAttributeNameBefore(code) + } + + return nok(code) + } + + function completeEnd(code) { + if (code === codes.greaterThan) { + effects.consume(code) + return completeAfter + } + + return nok(code) + } + + function completeAfter(code) { + if (markdownSpace(code)) { + effects.consume(code) + return completeAfter + } + + return code === codes.eof || markdownLineEnding(code) + ? 
continuation(code) + : nok(code) + } + + function continuation(code) { + if (code === codes.dash && kind === constants.htmlComment) { + effects.consume(code) + return continuationCommentInside + } + + if (code === codes.lessThan && kind === constants.htmlRaw) { + effects.consume(code) + return continuationRawTagOpen + } + + if (code === codes.greaterThan && kind === constants.htmlDeclaration) { + effects.consume(code) + return continuationClose + } + + if (code === codes.questionMark && kind === constants.htmlInstruction) { + effects.consume(code) + return continuationDeclarationInside + } + + if (code === codes.rightSquareBracket && kind === constants.htmlCdata) { + effects.consume(code) + return continuationCharacterDataInside + } + + if ( + markdownLineEnding(code) && + (kind === constants.htmlBasic || kind === constants.htmlComplete) + ) { + return effects.check( + nextBlankConstruct, + continuationClose, + continuationAtLineEnding + )(code) + } + + if (code === codes.eof || markdownLineEnding(code)) { + return continuationAtLineEnding(code) + } + + effects.consume(code) + return continuation + } + + function continuationAtLineEnding(code) { + effects.exit(types.htmlFlowData) + return htmlContinueStart(code) + } + + function htmlContinueStart(code) { + if (code === codes.eof) { + return done(code) + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return htmlContinueStart + } + + effects.enter(types.htmlFlowData) + return continuation(code) + } + + function continuationCommentInside(code) { + if (code === codes.dash) { + effects.consume(code) + return continuationDeclarationInside + } + + return continuation(code) + } + + function continuationRawTagOpen(code) { + if (code === codes.slash) { + effects.consume(code) + buffer = '' + return continuationRawEndTag + } + + return continuation(code) + } + + function continuationRawEndTag(code) { + if ( + code === codes.greaterThan && + htmlRawNames.indexOf(buffer.toLowerCase()) > -1 + ) { + effects.consume(code) + return continuationClose + } + + if (asciiAlpha(code) && buffer.length < constants.htmlRawSizeMax) { + effects.consume(code) + buffer += fromCharCode(code) + return continuationRawEndTag + } + + return continuation(code) + } + + function continuationCharacterDataInside(code) { + if (code === codes.rightSquareBracket) { + effects.consume(code) + return continuationDeclarationInside + } + + return continuation(code) + } + + function continuationDeclarationInside(code) { + if (code === codes.greaterThan) { + effects.consume(code) + return continuationClose + } + + return continuation(code) + } + + function continuationClose(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.htmlFlowData) + return done(code) + } + + effects.consume(code) + return continuationClose + } + + function done(code) { + effects.exit(types.htmlFlow) + return ok(code) + } +} + +function tokenizeNextBlank(effects, ok, nok) { + return start + + function start(code) { + assert__default['default']( + markdownLineEnding(code), + 'expected a line ending' + ) + effects.exit(types.htmlFlowData) + effects.enter(types.lineEndingBlank) + effects.consume(code) + effects.exit(types.lineEndingBlank) + return effects.attempt(partialBlankLine, ok, nok) + } +} + +module.exports = htmlFlow diff --git a/node_modules/micromark/lib/tokenize/html-flow.mjs b/node_modules/micromark/lib/tokenize/html-flow.mjs new file mode 100644 index 00000000..5dda6d74 --- /dev/null +++ 
b/node_modules/micromark/lib/tokenize/html-flow.mjs @@ -0,0 +1,498 @@ +var htmlFlow = { + name: 'htmlFlow', + tokenize: tokenizeHtmlFlow, + resolveTo: resolveToHtmlFlow, + concrete: true +} +export default htmlFlow + +import assert from 'assert' +import asciiAlpha from '../character/ascii-alpha.mjs' +import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import constants from '../constant/constants.mjs' +import fromCharCode from '../constant/from-char-code.mjs' +import basics from '../constant/html-block-names.mjs' +import raws from '../constant/html-raw-names.mjs' +import types from '../constant/types.mjs' +import blank from './partial-blank-line.mjs' + +var nextBlankConstruct = {tokenize: tokenizeNextBlank, partial: true} + +function resolveToHtmlFlow(events) { + var index = events.length + + while (index--) { + if ( + events[index][0] === 'enter' && + events[index][1].type === types.htmlFlow + ) { + break + } + } + + if (index > 1 && events[index - 2][1].type === types.linePrefix) { + // Add the prefix start to the HTML token. + events[index][1].start = events[index - 2][1].start + // Add the prefix start to the HTML line token. + events[index + 1][1].start = events[index - 2][1].start + // Remove the line prefix. + events.splice(index - 2, 2) + } + + return events +} + +function tokenizeHtmlFlow(effects, ok, nok) { + var self = this + var kind + var startTag + var buffer + var index + var marker + + return start + + function start(code) { + assert(code === codes.lessThan, 'expected `<`') + effects.enter(types.htmlFlow) + effects.enter(types.htmlFlowData) + effects.consume(code) + return open + } + + function open(code) { + if (code === codes.exclamationMark) { + effects.consume(code) + return declarationStart + } + + if (code === codes.slash) { + effects.consume(code) + return tagCloseStart + } + + if (code === codes.questionMark) { + effects.consume(code) + kind = constants.htmlInstruction + // While we’re in an instruction instead of a declaration, we’re on a `?` + // right now, so we do need to search for `>`, similar to declarations. + return self.interrupt ? ok : continuationDeclarationInside + } + + if (asciiAlpha(code)) { + effects.consume(code) + buffer = fromCharCode(code) + startTag = true + return tagName + } + + return nok(code) + } + + function declarationStart(code) { + if (code === codes.dash) { + effects.consume(code) + kind = constants.htmlComment + return commentOpenInside + } + + if (code === codes.leftSquareBracket) { + effects.consume(code) + kind = constants.htmlCdata + buffer = constants.cdataOpeningString + index = 0 + return cdataOpenInside + } + + if (asciiAlpha(code)) { + effects.consume(code) + kind = constants.htmlDeclaration + return self.interrupt ? ok : continuationDeclarationInside + } + + return nok(code) + } + + function commentOpenInside(code) { + if (code === codes.dash) { + effects.consume(code) + return self.interrupt ? ok : continuationDeclarationInside + } + + return nok(code) + } + + function cdataOpenInside(code) { + if (code === buffer.charCodeAt(index++)) { + effects.consume(code) + return index === buffer.length + ? self.interrupt + ? 
ok + : continuation + : cdataOpenInside + } + + return nok(code) + } + + function tagCloseStart(code) { + if (asciiAlpha(code)) { + effects.consume(code) + buffer = fromCharCode(code) + return tagName + } + + return nok(code) + } + + function tagName(code) { + if ( + code === codes.eof || + code === codes.slash || + code === codes.greaterThan || + markdownLineEndingOrSpace(code) + ) { + if ( + code !== codes.slash && + startTag && + raws.indexOf(buffer.toLowerCase()) > -1 + ) { + kind = constants.htmlRaw + return self.interrupt ? ok(code) : continuation(code) + } + + if (basics.indexOf(buffer.toLowerCase()) > -1) { + kind = constants.htmlBasic + + if (code === codes.slash) { + effects.consume(code) + return basicSelfClosing + } + + return self.interrupt ? ok(code) : continuation(code) + } + + kind = constants.htmlComplete + // Do not support complete HTML when interrupting. + return self.interrupt + ? nok(code) + : startTag + ? completeAttributeNameBefore(code) + : completeClosingTagAfter(code) + } + + if (code === codes.dash || asciiAlphanumeric(code)) { + effects.consume(code) + buffer += fromCharCode(code) + return tagName + } + + return nok(code) + } + + function basicSelfClosing(code) { + if (code === codes.greaterThan) { + effects.consume(code) + return self.interrupt ? ok : continuation + } + + return nok(code) + } + + function completeClosingTagAfter(code) { + if (markdownSpace(code)) { + effects.consume(code) + return completeClosingTagAfter + } + + return completeEnd(code) + } + + function completeAttributeNameBefore(code) { + if (code === codes.slash) { + effects.consume(code) + return completeEnd + } + + if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) { + effects.consume(code) + return completeAttributeName + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeNameBefore + } + + return completeEnd(code) + } + + function completeAttributeName(code) { + if ( + code === codes.dash || + code === codes.dot || + code === codes.colon || + code === codes.underscore || + asciiAlphanumeric(code) + ) { + effects.consume(code) + return completeAttributeName + } + + return completeAttributeNameAfter(code) + } + + function completeAttributeNameAfter(code) { + if (code === codes.equalsTo) { + effects.consume(code) + return completeAttributeValueBefore + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeNameAfter + } + + return completeAttributeNameBefore(code) + } + + function completeAttributeValueBefore(code) { + if ( + code === codes.eof || + code === codes.lessThan || + code === codes.equalsTo || + code === codes.greaterThan || + code === codes.graveAccent + ) { + return nok(code) + } + + if (code === codes.quotationMark || code === codes.apostrophe) { + effects.consume(code) + marker = code + return completeAttributeValueQuoted + } + + if (markdownSpace(code)) { + effects.consume(code) + return completeAttributeValueBefore + } + + marker = undefined + return completeAttributeValueUnquoted(code) + } + + function completeAttributeValueQuoted(code) { + if (code === marker) { + effects.consume(code) + return completeAttributeValueQuotedAfter + } + + if (code === codes.eof || markdownLineEnding(code)) { + return nok(code) + } + + effects.consume(code) + return completeAttributeValueQuoted + } + + function completeAttributeValueUnquoted(code) { + if ( + code === codes.eof || + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.lessThan || + code === codes.equalsTo || + code 
=== codes.greaterThan || + code === codes.graveAccent || + markdownLineEndingOrSpace(code) + ) { + return completeAttributeNameAfter(code) + } + + effects.consume(code) + return completeAttributeValueUnquoted + } + + function completeAttributeValueQuotedAfter(code) { + if ( + code === codes.slash || + code === codes.greaterThan || + markdownSpace(code) + ) { + return completeAttributeNameBefore(code) + } + + return nok(code) + } + + function completeEnd(code) { + if (code === codes.greaterThan) { + effects.consume(code) + return completeAfter + } + + return nok(code) + } + + function completeAfter(code) { + if (markdownSpace(code)) { + effects.consume(code) + return completeAfter + } + + return code === codes.eof || markdownLineEnding(code) + ? continuation(code) + : nok(code) + } + + function continuation(code) { + if (code === codes.dash && kind === constants.htmlComment) { + effects.consume(code) + return continuationCommentInside + } + + if (code === codes.lessThan && kind === constants.htmlRaw) { + effects.consume(code) + return continuationRawTagOpen + } + + if (code === codes.greaterThan && kind === constants.htmlDeclaration) { + effects.consume(code) + return continuationClose + } + + if (code === codes.questionMark && kind === constants.htmlInstruction) { + effects.consume(code) + return continuationDeclarationInside + } + + if (code === codes.rightSquareBracket && kind === constants.htmlCdata) { + effects.consume(code) + return continuationCharacterDataInside + } + + if ( + markdownLineEnding(code) && + (kind === constants.htmlBasic || kind === constants.htmlComplete) + ) { + return effects.check( + nextBlankConstruct, + continuationClose, + continuationAtLineEnding + )(code) + } + + if (code === codes.eof || markdownLineEnding(code)) { + return continuationAtLineEnding(code) + } + + effects.consume(code) + return continuation + } + + function continuationAtLineEnding(code) { + effects.exit(types.htmlFlowData) + return htmlContinueStart(code) + } + + function htmlContinueStart(code) { + if (code === codes.eof) { + return done(code) + } + + if (markdownLineEnding(code)) { + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return htmlContinueStart + } + + effects.enter(types.htmlFlowData) + return continuation(code) + } + + function continuationCommentInside(code) { + if (code === codes.dash) { + effects.consume(code) + return continuationDeclarationInside + } + + return continuation(code) + } + + function continuationRawTagOpen(code) { + if (code === codes.slash) { + effects.consume(code) + buffer = '' + return continuationRawEndTag + } + + return continuation(code) + } + + function continuationRawEndTag(code) { + if (code === codes.greaterThan && raws.indexOf(buffer.toLowerCase()) > -1) { + effects.consume(code) + return continuationClose + } + + if (asciiAlpha(code) && buffer.length < constants.htmlRawSizeMax) { + effects.consume(code) + buffer += fromCharCode(code) + return continuationRawEndTag + } + + return continuation(code) + } + + function continuationCharacterDataInside(code) { + if (code === codes.rightSquareBracket) { + effects.consume(code) + return continuationDeclarationInside + } + + return continuation(code) + } + + function continuationDeclarationInside(code) { + if (code === codes.greaterThan) { + effects.consume(code) + return continuationClose + } + + return continuation(code) + } + + function continuationClose(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.htmlFlowData) + return 
done(code) + } + + effects.consume(code) + return continuationClose + } + + function done(code) { + effects.exit(types.htmlFlow) + return ok(code) + } +} + +function tokenizeNextBlank(effects, ok, nok) { + return start + + function start(code) { + assert(markdownLineEnding(code), 'expected a line ending') + effects.exit(types.htmlFlowData) + effects.enter(types.lineEndingBlank) + effects.consume(code) + effects.exit(types.lineEndingBlank) + return effects.attempt(blank, ok, nok) + } +} diff --git a/node_modules/micromark/lib/tokenize/html-text.js b/node_modules/micromark/lib/tokenize/html-text.js new file mode 100644 index 00000000..eda4db2d --- /dev/null +++ b/node_modules/micromark/lib/tokenize/html-text.js @@ -0,0 +1,458 @@ +'use strict' + +var assert = require('assert') +var asciiAlpha = require('../character/ascii-alpha.js') +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var markdownSpace = require('../character/markdown-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var htmlText = { + name: 'htmlText', + tokenize: tokenizeHtmlText +} + +function tokenizeHtmlText(effects, ok, nok) { + var self = this + var marker + var buffer + var index + var returnState + + return start + + function start(code) { + assert__default['default'](code === codes.lessThan, 'expected `<`') + effects.enter(types.htmlText) + effects.enter(types.htmlTextData) + effects.consume(code) + return open + } + + function open(code) { + if (code === codes.exclamationMark) { + effects.consume(code) + return declarationOpen + } + + if (code === codes.slash) { + effects.consume(code) + return tagCloseStart + } + + if (code === codes.questionMark) { + effects.consume(code) + return instruction + } + + if (asciiAlpha(code)) { + effects.consume(code) + return tagOpen + } + + return nok(code) + } + + function declarationOpen(code) { + if (code === codes.dash) { + effects.consume(code) + return commentOpen + } + + if (code === codes.leftSquareBracket) { + effects.consume(code) + buffer = constants.cdataOpeningString + index = 0 + return cdataOpen + } + + if (asciiAlpha(code)) { + effects.consume(code) + return declaration + } + + return nok(code) + } + + function commentOpen(code) { + if (code === codes.dash) { + effects.consume(code) + return commentStart + } + + return nok(code) + } + + function commentStart(code) { + if (code === codes.eof || code === codes.greaterThan) { + return nok(code) + } + + if (code === codes.dash) { + effects.consume(code) + return commentStartDash + } + + return comment(code) + } + + function commentStartDash(code) { + if (code === codes.eof || code === codes.greaterThan) { + return nok(code) + } + + return comment(code) + } + + function comment(code) { + if (code === codes.eof) { + return nok(code) + } + + if (code === codes.dash) { + effects.consume(code) + return commentClose + } + + if (markdownLineEnding(code)) { + returnState = comment + return atLineEnding(code) + } + + effects.consume(code) + return comment + } + + function commentClose(code) { + if (code === 
codes.dash) { + effects.consume(code) + return end + } + + return comment(code) + } + + function cdataOpen(code) { + if (code === buffer.charCodeAt(index++)) { + effects.consume(code) + return index === buffer.length ? cdata : cdataOpen + } + + return nok(code) + } + + function cdata(code) { + if (code === codes.eof) { + return nok(code) + } + + if (code === codes.rightSquareBracket) { + effects.consume(code) + return cdataClose + } + + if (markdownLineEnding(code)) { + returnState = cdata + return atLineEnding(code) + } + + effects.consume(code) + return cdata + } + + function cdataClose(code) { + if (code === codes.rightSquareBracket) { + effects.consume(code) + return cdataEnd + } + + return cdata(code) + } + + function cdataEnd(code) { + if (code === codes.greaterThan) { + return end(code) + } + + if (code === codes.rightSquareBracket) { + effects.consume(code) + return cdataEnd + } + + return cdata(code) + } + + function declaration(code) { + if (code === codes.eof || code === codes.greaterThan) { + return end(code) + } + + if (markdownLineEnding(code)) { + returnState = declaration + return atLineEnding(code) + } + + effects.consume(code) + return declaration + } + + function instruction(code) { + if (code === codes.eof) { + return nok(code) + } + + if (code === codes.questionMark) { + effects.consume(code) + return instructionClose + } + + if (markdownLineEnding(code)) { + returnState = instruction + return atLineEnding(code) + } + + effects.consume(code) + return instruction + } + + function instructionClose(code) { + return code === codes.greaterThan ? end(code) : instruction(code) + } + + function tagCloseStart(code) { + if (asciiAlpha(code)) { + effects.consume(code) + return tagClose + } + + return nok(code) + } + + function tagClose(code) { + if (code === codes.dash || asciiAlphanumeric(code)) { + effects.consume(code) + return tagClose + } + + return tagCloseBetween(code) + } + + function tagCloseBetween(code) { + if (markdownLineEnding(code)) { + returnState = tagCloseBetween + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagCloseBetween + } + + return end(code) + } + + function tagOpen(code) { + if (code === codes.dash || asciiAlphanumeric(code)) { + effects.consume(code) + return tagOpen + } + + if ( + code === codes.slash || + code === codes.greaterThan || + markdownLineEndingOrSpace(code) + ) { + return tagOpenBetween(code) + } + + return nok(code) + } + + function tagOpenBetween(code) { + if (code === codes.slash) { + effects.consume(code) + return end + } + + if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) { + effects.consume(code) + return tagOpenAttributeName + } + + if (markdownLineEnding(code)) { + returnState = tagOpenBetween + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenBetween + } + + return end(code) + } + + function tagOpenAttributeName(code) { + if ( + code === codes.dash || + code === codes.dot || + code === codes.colon || + code === codes.underscore || + asciiAlphanumeric(code) + ) { + effects.consume(code) + return tagOpenAttributeName + } + + return tagOpenAttributeNameAfter(code) + } + + function tagOpenAttributeNameAfter(code) { + if (code === codes.equalsTo) { + effects.consume(code) + return tagOpenAttributeValueBefore + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeNameAfter + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenAttributeNameAfter + 
} + + return tagOpenBetween(code) + } + + function tagOpenAttributeValueBefore(code) { + if ( + code === codes.eof || + code === codes.lessThan || + code === codes.equalsTo || + code === codes.greaterThan || + code === codes.graveAccent + ) { + return nok(code) + } + + if (code === codes.quotationMark || code === codes.apostrophe) { + effects.consume(code) + marker = code + return tagOpenAttributeValueQuoted + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeValueBefore + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenAttributeValueBefore + } + + effects.consume(code) + marker = undefined + return tagOpenAttributeValueUnquoted + } + + function tagOpenAttributeValueQuoted(code) { + if (code === marker) { + effects.consume(code) + return tagOpenAttributeValueQuotedAfter + } + + if (code === codes.eof) { + return nok(code) + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeValueQuoted + return atLineEnding(code) + } + + effects.consume(code) + return tagOpenAttributeValueQuoted + } + + function tagOpenAttributeValueQuotedAfter(code) { + if ( + code === codes.greaterThan || + code === codes.slash || + markdownLineEndingOrSpace(code) + ) { + return tagOpenBetween(code) + } + + return nok(code) + } + + function tagOpenAttributeValueUnquoted(code) { + if ( + code === codes.eof || + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.lessThan || + code === codes.equalsTo || + code === codes.graveAccent + ) { + return nok(code) + } + + if (code === codes.greaterThan || markdownLineEndingOrSpace(code)) { + return tagOpenBetween(code) + } + + effects.consume(code) + return tagOpenAttributeValueUnquoted + } + + // We can’t have blank lines in content, so no need to worry about empty + // tokens. + function atLineEnding(code) { + assert__default['default'](returnState, 'expected return state') + assert__default['default'](markdownLineEnding(code), 'expected eol') + effects.exit(types.htmlTextData) + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return factorySpace( + effects, + afterPrefix, + types.linePrefix, + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? 
undefined + : constants.tabSize + ) + } + + function afterPrefix(code) { + effects.enter(types.htmlTextData) + return returnState(code) + } + + function end(code) { + if (code === codes.greaterThan) { + effects.consume(code) + effects.exit(types.htmlTextData) + effects.exit(types.htmlText) + return ok + } + + return nok(code) + } +} + +module.exports = htmlText diff --git a/node_modules/micromark/lib/tokenize/html-text.mjs b/node_modules/micromark/lib/tokenize/html-text.mjs new file mode 100644 index 00000000..2f571a0f --- /dev/null +++ b/node_modules/micromark/lib/tokenize/html-text.mjs @@ -0,0 +1,449 @@ +var htmlText = { + name: 'htmlText', + tokenize: tokenizeHtmlText +} +export default htmlText + +import assert from 'assert' +import asciiAlpha from '../character/ascii-alpha.mjs' +import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import spaceFactory from './factory-space.mjs' + +function tokenizeHtmlText(effects, ok, nok) { + var self = this + var marker + var buffer + var index + var returnState + + return start + + function start(code) { + assert(code === codes.lessThan, 'expected `<`') + effects.enter(types.htmlText) + effects.enter(types.htmlTextData) + effects.consume(code) + return open + } + + function open(code) { + if (code === codes.exclamationMark) { + effects.consume(code) + return declarationOpen + } + + if (code === codes.slash) { + effects.consume(code) + return tagCloseStart + } + + if (code === codes.questionMark) { + effects.consume(code) + return instruction + } + + if (asciiAlpha(code)) { + effects.consume(code) + return tagOpen + } + + return nok(code) + } + + function declarationOpen(code) { + if (code === codes.dash) { + effects.consume(code) + return commentOpen + } + + if (code === codes.leftSquareBracket) { + effects.consume(code) + buffer = constants.cdataOpeningString + index = 0 + return cdataOpen + } + + if (asciiAlpha(code)) { + effects.consume(code) + return declaration + } + + return nok(code) + } + + function commentOpen(code) { + if (code === codes.dash) { + effects.consume(code) + return commentStart + } + + return nok(code) + } + + function commentStart(code) { + if (code === codes.eof || code === codes.greaterThan) { + return nok(code) + } + + if (code === codes.dash) { + effects.consume(code) + return commentStartDash + } + + return comment(code) + } + + function commentStartDash(code) { + if (code === codes.eof || code === codes.greaterThan) { + return nok(code) + } + + return comment(code) + } + + function comment(code) { + if (code === codes.eof) { + return nok(code) + } + + if (code === codes.dash) { + effects.consume(code) + return commentClose + } + + if (markdownLineEnding(code)) { + returnState = comment + return atLineEnding(code) + } + + effects.consume(code) + return comment + } + + function commentClose(code) { + if (code === codes.dash) { + effects.consume(code) + return end + } + + return comment(code) + } + + function cdataOpen(code) { + if (code === buffer.charCodeAt(index++)) { + effects.consume(code) + return index === buffer.length ? 
cdata : cdataOpen + } + + return nok(code) + } + + function cdata(code) { + if (code === codes.eof) { + return nok(code) + } + + if (code === codes.rightSquareBracket) { + effects.consume(code) + return cdataClose + } + + if (markdownLineEnding(code)) { + returnState = cdata + return atLineEnding(code) + } + + effects.consume(code) + return cdata + } + + function cdataClose(code) { + if (code === codes.rightSquareBracket) { + effects.consume(code) + return cdataEnd + } + + return cdata(code) + } + + function cdataEnd(code) { + if (code === codes.greaterThan) { + return end(code) + } + + if (code === codes.rightSquareBracket) { + effects.consume(code) + return cdataEnd + } + + return cdata(code) + } + + function declaration(code) { + if (code === codes.eof || code === codes.greaterThan) { + return end(code) + } + + if (markdownLineEnding(code)) { + returnState = declaration + return atLineEnding(code) + } + + effects.consume(code) + return declaration + } + + function instruction(code) { + if (code === codes.eof) { + return nok(code) + } + + if (code === codes.questionMark) { + effects.consume(code) + return instructionClose + } + + if (markdownLineEnding(code)) { + returnState = instruction + return atLineEnding(code) + } + + effects.consume(code) + return instruction + } + + function instructionClose(code) { + return code === codes.greaterThan ? end(code) : instruction(code) + } + + function tagCloseStart(code) { + if (asciiAlpha(code)) { + effects.consume(code) + return tagClose + } + + return nok(code) + } + + function tagClose(code) { + if (code === codes.dash || asciiAlphanumeric(code)) { + effects.consume(code) + return tagClose + } + + return tagCloseBetween(code) + } + + function tagCloseBetween(code) { + if (markdownLineEnding(code)) { + returnState = tagCloseBetween + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagCloseBetween + } + + return end(code) + } + + function tagOpen(code) { + if (code === codes.dash || asciiAlphanumeric(code)) { + effects.consume(code) + return tagOpen + } + + if ( + code === codes.slash || + code === codes.greaterThan || + markdownLineEndingOrSpace(code) + ) { + return tagOpenBetween(code) + } + + return nok(code) + } + + function tagOpenBetween(code) { + if (code === codes.slash) { + effects.consume(code) + return end + } + + if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) { + effects.consume(code) + return tagOpenAttributeName + } + + if (markdownLineEnding(code)) { + returnState = tagOpenBetween + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenBetween + } + + return end(code) + } + + function tagOpenAttributeName(code) { + if ( + code === codes.dash || + code === codes.dot || + code === codes.colon || + code === codes.underscore || + asciiAlphanumeric(code) + ) { + effects.consume(code) + return tagOpenAttributeName + } + + return tagOpenAttributeNameAfter(code) + } + + function tagOpenAttributeNameAfter(code) { + if (code === codes.equalsTo) { + effects.consume(code) + return tagOpenAttributeValueBefore + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeNameAfter + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenAttributeNameAfter + } + + return tagOpenBetween(code) + } + + function tagOpenAttributeValueBefore(code) { + if ( + code === codes.eof || + code === codes.lessThan || + code === codes.equalsTo || + code === codes.greaterThan || + code === 
codes.graveAccent + ) { + return nok(code) + } + + if (code === codes.quotationMark || code === codes.apostrophe) { + effects.consume(code) + marker = code + return tagOpenAttributeValueQuoted + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeValueBefore + return atLineEnding(code) + } + + if (markdownSpace(code)) { + effects.consume(code) + return tagOpenAttributeValueBefore + } + + effects.consume(code) + marker = undefined + return tagOpenAttributeValueUnquoted + } + + function tagOpenAttributeValueQuoted(code) { + if (code === marker) { + effects.consume(code) + return tagOpenAttributeValueQuotedAfter + } + + if (code === codes.eof) { + return nok(code) + } + + if (markdownLineEnding(code)) { + returnState = tagOpenAttributeValueQuoted + return atLineEnding(code) + } + + effects.consume(code) + return tagOpenAttributeValueQuoted + } + + function tagOpenAttributeValueQuotedAfter(code) { + if ( + code === codes.greaterThan || + code === codes.slash || + markdownLineEndingOrSpace(code) + ) { + return tagOpenBetween(code) + } + + return nok(code) + } + + function tagOpenAttributeValueUnquoted(code) { + if ( + code === codes.eof || + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.lessThan || + code === codes.equalsTo || + code === codes.graveAccent + ) { + return nok(code) + } + + if (code === codes.greaterThan || markdownLineEndingOrSpace(code)) { + return tagOpenBetween(code) + } + + effects.consume(code) + return tagOpenAttributeValueUnquoted + } + + // We can’t have blank lines in content, so no need to worry about empty + // tokens. + function atLineEnding(code) { + assert(returnState, 'expected return state') + assert(markdownLineEnding(code), 'expected eol') + effects.exit(types.htmlTextData) + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return spaceFactory( + effects, + afterPrefix, + types.linePrefix, + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + ) + } + + function afterPrefix(code) { + effects.enter(types.htmlTextData) + return returnState(code) + } + + function end(code) { + if (code === codes.greaterThan) { + effects.consume(code) + effects.exit(types.htmlTextData) + effects.exit(types.htmlText) + return ok + } + + return nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/label-end.js b/node_modules/micromark/lib/tokenize/label-end.js new file mode 100644 index 00000000..51ee2366 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/label-end.js @@ -0,0 +1,374 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var chunkedPush = require('../util/chunked-push.js') +var chunkedSplice = require('../util/chunked-splice.js') +var normalizeIdentifier = require('../util/normalize-identifier.js') +var resolveAll = require('../util/resolve-all.js') +var shallow = require('../util/shallow.js') +var factoryDestination = require('./factory-destination.js') +var factoryLabel = require('./factory-label.js') +var factoryTitle = require('./factory-title.js') +var factoryWhitespace = require('./factory-whitespace.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var labelEnd = { + name: 'labelEnd', + tokenize: tokenizeLabelEnd, + resolveTo: resolveToLabelEnd, + resolveAll: resolveAllLabelEnd +} + +var resourceConstruct = {tokenize: tokenizeResource} +var fullReferenceConstruct = {tokenize: tokenizeFullReference} +var collapsedReferenceConstruct = {tokenize: tokenizeCollapsedReference} + +function resolveAllLabelEnd(events) { + var index = -1 + var token + + while (++index < events.length) { + token = events[index][1] + + if ( + !token._used && + (token.type === types.labelImage || + token.type === types.labelLink || + token.type === types.labelEnd) + ) { + // Remove the marker. + events.splice(index + 1, token.type === types.labelImage ? 4 : 2) + token.type = types.data + index++ + } + } + + return events +} + +function resolveToLabelEnd(events, context) { + var index = events.length + var offset = 0 + var group + var label + var text + var token + var open + var close + var media + + // Find an opening. + while (index--) { + token = events[index][1] + + if (open) { + // If we see another link, or inactive link label, we’ve been here before. + if ( + token.type === types.link || + (token.type === types.labelLink && token._inactive) + ) { + break + } + + // Mark other link openings as inactive, as we can’t have links in + // links. + if (events[index][0] === 'enter' && token.type === types.labelLink) { + token._inactive = true + } + } else if (close) { + if ( + events[index][0] === 'enter' && + (token.type === types.labelImage || token.type === types.labelLink) && + !token._balanced + ) { + open = index + + if (token.type !== types.labelLink) { + offset = 2 + break + } + } + } else if (token.type === types.labelEnd) { + close = index + } + } + + group = { + type: events[open][1].type === types.labelLink ? types.link : types.image, + start: shallow(events[open][1].start), + end: shallow(events[events.length - 1][1].end) + } + + label = { + type: types.label, + start: shallow(events[open][1].start), + end: shallow(events[close][1].end) + } + + text = { + type: types.labelText, + start: shallow(events[open + offset + 2][1].end), + end: shallow(events[close - 2][1].start) + } + + media = [ + ['enter', group, context], + ['enter', label, context] + ] + + // Opening marker. + media = chunkedPush(media, events.slice(open + 1, open + offset + 3)) + + // Text open. + media = chunkedPush(media, [['enter', text, context]]) + + // Between. + media = chunkedPush( + media, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(open + offset + 4, close - 3), + context + ) + ) + + // Text close, marker close, label close. + media = chunkedPush(media, [ + ['exit', text, context], + events[close - 2], + events[close - 1], + ['exit', label, context] + ]) + + // Reference, resource, or so. + media = chunkedPush(media, events.slice(close + 1)) + + // Media close. + media = chunkedPush(media, [['exit', group, context]]) + + chunkedSplice(events, open, events.length, media) + + return events +} + +function tokenizeLabelEnd(effects, ok, nok) { + var self = this + var index = self.events.length + var labelStart + var defined + + // Find an opening. 
+ while (index--) { + if ( + (self.events[index][1].type === types.labelImage || + self.events[index][1].type === types.labelLink) && + !self.events[index][1]._balanced + ) { + labelStart = self.events[index][1] + break + } + } + + return start + + function start(code) { + assert__default['default']( + code === codes.rightSquareBracket, + 'expected `]`' + ) + + if (!labelStart) { + return nok(code) + } + + // It’s a balanced bracket, but contains a link. + if (labelStart._inactive) return balanced(code) + defined = + self.parser.defined.indexOf( + normalizeIdentifier( + self.sliceSerialize({start: labelStart.end, end: self.now()}) + ) + ) > -1 + effects.enter(types.labelEnd) + effects.enter(types.labelMarker) + effects.consume(code) + effects.exit(types.labelMarker) + effects.exit(types.labelEnd) + return afterLabelEnd + } + + function afterLabelEnd(code) { + // Resource: `[asd](fgh)`. + if (code === codes.leftParenthesis) { + return effects.attempt( + resourceConstruct, + ok, + defined ? ok : balanced + )(code) + } + + // Collapsed (`[asd][]`) or full (`[asd][fgh]`) reference? + if (code === codes.leftSquareBracket) { + return effects.attempt( + fullReferenceConstruct, + ok, + defined + ? effects.attempt(collapsedReferenceConstruct, ok, balanced) + : balanced + )(code) + } + + // Shortcut reference: `[asd]`? + return defined ? ok(code) : balanced(code) + } + + function balanced(code) { + labelStart._balanced = true + return nok(code) + } +} + +function tokenizeResource(effects, ok, nok) { + return start + + function start(code) { + assert__default['default'].equal( + code, + codes.leftParenthesis, + 'expected left paren' + ) + effects.enter(types.resource) + effects.enter(types.resourceMarker) + effects.consume(code) + effects.exit(types.resourceMarker) + return factoryWhitespace(effects, open) + } + + function open(code) { + if (code === codes.rightParenthesis) { + return end(code) + } + + return factoryDestination( + effects, + destinationAfter, + nok, + types.resourceDestination, + types.resourceDestinationLiteral, + types.resourceDestinationLiteralMarker, + types.resourceDestinationRaw, + types.resourceDestinationString, + constants.linkResourceDestinationBalanceMax + )(code) + } + + function destinationAfter(code) { + return markdownLineEndingOrSpace(code) + ? factoryWhitespace(effects, between)(code) + : end(code) + } + + function between(code) { + if ( + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.leftParenthesis + ) { + return factoryTitle( + effects, + factoryWhitespace(effects, end), + nok, + types.resourceTitle, + types.resourceTitleMarker, + types.resourceTitleString + )(code) + } + + return end(code) + } + + function end(code) { + if (code === codes.rightParenthesis) { + effects.enter(types.resourceMarker) + effects.consume(code) + effects.exit(types.resourceMarker) + effects.exit(types.resource) + return ok + } + + return nok(code) + } +} + +function tokenizeFullReference(effects, ok, nok) { + var self = this + + return start + + function start(code) { + assert__default['default'].equal( + code, + codes.leftSquareBracket, + 'expected left bracket' + ) + return factoryLabel.call( + self, + effects, + afterLabel, + nok, + types.reference, + types.referenceMarker, + types.referenceString + )(code) + } + + function afterLabel(code) { + return self.parser.defined.indexOf( + normalizeIdentifier( + self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1) + ) + ) < 0 + ? 
nok(code) + : ok(code) + } +} + +function tokenizeCollapsedReference(effects, ok, nok) { + return start + + function start(code) { + assert__default['default'].equal( + code, + codes.leftSquareBracket, + 'expected left bracket' + ) + effects.enter(types.reference) + effects.enter(types.referenceMarker) + effects.consume(code) + effects.exit(types.referenceMarker) + return open + } + + function open(code) { + if (code === codes.rightSquareBracket) { + effects.enter(types.referenceMarker) + effects.consume(code) + effects.exit(types.referenceMarker) + effects.exit(types.reference) + return ok + } + + return nok(code) + } +} + +module.exports = labelEnd diff --git a/node_modules/micromark/lib/tokenize/label-end.mjs b/node_modules/micromark/lib/tokenize/label-end.mjs new file mode 100644 index 00000000..16beeb07 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/label-end.mjs @@ -0,0 +1,350 @@ +var labelEnd = { + name: 'labelEnd', + tokenize: tokenizeLabelEnd, + resolveTo: resolveToLabelEnd, + resolveAll: resolveAllLabelEnd +} +export default labelEnd + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import chunkedPush from '../util/chunked-push.mjs' +import chunkedSplice from '../util/chunked-splice.mjs' +import normalizeIdentifier from '../util/normalize-identifier.mjs' +import resolveAll from '../util/resolve-all.mjs' +import shallow from '../util/shallow.mjs' +import destinationFactory from './factory-destination.mjs' +import labelFactory from './factory-label.mjs' +import titleFactory from './factory-title.mjs' +import whitespaceFactory from './factory-whitespace.mjs' + +var resourceConstruct = {tokenize: tokenizeResource} +var fullReferenceConstruct = {tokenize: tokenizeFullReference} +var collapsedReferenceConstruct = {tokenize: tokenizeCollapsedReference} + +function resolveAllLabelEnd(events) { + var index = -1 + var token + + while (++index < events.length) { + token = events[index][1] + + if ( + !token._used && + (token.type === types.labelImage || + token.type === types.labelLink || + token.type === types.labelEnd) + ) { + // Remove the marker. + events.splice(index + 1, token.type === types.labelImage ? 4 : 2) + token.type = types.data + index++ + } + } + + return events +} + +function resolveToLabelEnd(events, context) { + var index = events.length + var offset = 0 + var group + var label + var text + var token + var open + var close + var media + + // Find an opening. + while (index--) { + token = events[index][1] + + if (open) { + // If we see another link, or inactive link label, we’ve been here before. + if ( + token.type === types.link || + (token.type === types.labelLink && token._inactive) + ) { + break + } + + // Mark other link openings as inactive, as we can’t have links in + // links. + if (events[index][0] === 'enter' && token.type === types.labelLink) { + token._inactive = true + } + } else if (close) { + if ( + events[index][0] === 'enter' && + (token.type === types.labelImage || token.type === types.labelLink) && + !token._balanced + ) { + open = index + + if (token.type !== types.labelLink) { + offset = 2 + break + } + } + } else if (token.type === types.labelEnd) { + close = index + } + } + + group = { + type: events[open][1].type === types.labelLink ? 
types.link : types.image, + start: shallow(events[open][1].start), + end: shallow(events[events.length - 1][1].end) + } + + label = { + type: types.label, + start: shallow(events[open][1].start), + end: shallow(events[close][1].end) + } + + text = { + type: types.labelText, + start: shallow(events[open + offset + 2][1].end), + end: shallow(events[close - 2][1].start) + } + + media = [ + ['enter', group, context], + ['enter', label, context] + ] + + // Opening marker. + media = chunkedPush(media, events.slice(open + 1, open + offset + 3)) + + // Text open. + media = chunkedPush(media, [['enter', text, context]]) + + // Between. + media = chunkedPush( + media, + resolveAll( + context.parser.constructs.insideSpan.null, + events.slice(open + offset + 4, close - 3), + context + ) + ) + + // Text close, marker close, label close. + media = chunkedPush(media, [ + ['exit', text, context], + events[close - 2], + events[close - 1], + ['exit', label, context] + ]) + + // Reference, resource, or so. + media = chunkedPush(media, events.slice(close + 1)) + + // Media close. + media = chunkedPush(media, [['exit', group, context]]) + + chunkedSplice(events, open, events.length, media) + + return events +} + +function tokenizeLabelEnd(effects, ok, nok) { + var self = this + var index = self.events.length + var labelStart + var defined + + // Find an opening. + while (index--) { + if ( + (self.events[index][1].type === types.labelImage || + self.events[index][1].type === types.labelLink) && + !self.events[index][1]._balanced + ) { + labelStart = self.events[index][1] + break + } + } + + return start + + function start(code) { + assert(code === codes.rightSquareBracket, 'expected `]`') + + if (!labelStart) { + return nok(code) + } + + // It’s a balanced bracket, but contains a link. + if (labelStart._inactive) return balanced(code) + defined = + self.parser.defined.indexOf( + normalizeIdentifier( + self.sliceSerialize({start: labelStart.end, end: self.now()}) + ) + ) > -1 + effects.enter(types.labelEnd) + effects.enter(types.labelMarker) + effects.consume(code) + effects.exit(types.labelMarker) + effects.exit(types.labelEnd) + return afterLabelEnd + } + + function afterLabelEnd(code) { + // Resource: `[asd](fgh)`. + if (code === codes.leftParenthesis) { + return effects.attempt( + resourceConstruct, + ok, + defined ? ok : balanced + )(code) + } + + // Collapsed (`[asd][]`) or full (`[asd][fgh]`) reference? + if (code === codes.leftSquareBracket) { + return effects.attempt( + fullReferenceConstruct, + ok, + defined + ? effects.attempt(collapsedReferenceConstruct, ok, balanced) + : balanced + )(code) + } + + // Shortcut reference: `[asd]`? + return defined ? 
ok(code) : balanced(code) + } + + function balanced(code) { + labelStart._balanced = true + return nok(code) + } +} + +function tokenizeResource(effects, ok, nok) { + return start + + function start(code) { + assert.equal(code, codes.leftParenthesis, 'expected left paren') + effects.enter(types.resource) + effects.enter(types.resourceMarker) + effects.consume(code) + effects.exit(types.resourceMarker) + return whitespaceFactory(effects, open) + } + + function open(code) { + if (code === codes.rightParenthesis) { + return end(code) + } + + return destinationFactory( + effects, + destinationAfter, + nok, + types.resourceDestination, + types.resourceDestinationLiteral, + types.resourceDestinationLiteralMarker, + types.resourceDestinationRaw, + types.resourceDestinationString, + constants.linkResourceDestinationBalanceMax + )(code) + } + + function destinationAfter(code) { + return markdownLineEndingOrSpace(code) + ? whitespaceFactory(effects, between)(code) + : end(code) + } + + function between(code) { + if ( + code === codes.quotationMark || + code === codes.apostrophe || + code === codes.leftParenthesis + ) { + return titleFactory( + effects, + whitespaceFactory(effects, end), + nok, + types.resourceTitle, + types.resourceTitleMarker, + types.resourceTitleString + )(code) + } + + return end(code) + } + + function end(code) { + if (code === codes.rightParenthesis) { + effects.enter(types.resourceMarker) + effects.consume(code) + effects.exit(types.resourceMarker) + effects.exit(types.resource) + return ok + } + + return nok(code) + } +} + +function tokenizeFullReference(effects, ok, nok) { + var self = this + + return start + + function start(code) { + assert.equal(code, codes.leftSquareBracket, 'expected left bracket') + return labelFactory.call( + self, + effects, + afterLabel, + nok, + types.reference, + types.referenceMarker, + types.referenceString + )(code) + } + + function afterLabel(code) { + return self.parser.defined.indexOf( + normalizeIdentifier( + self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1) + ) + ) < 0 + ? nok(code) + : ok(code) + } +} + +function tokenizeCollapsedReference(effects, ok, nok) { + return start + + function start(code) { + assert.equal(code, codes.leftSquareBracket, 'expected left bracket') + effects.enter(types.reference) + effects.enter(types.referenceMarker) + effects.consume(code) + effects.exit(types.referenceMarker) + return open + } + + function open(code) { + if (code === codes.rightSquareBracket) { + effects.enter(types.referenceMarker) + effects.consume(code) + effects.exit(types.referenceMarker) + effects.exit(types.reference) + return ok + } + + return nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/label-start-image.js b/node_modules/micromark/lib/tokenize/label-start-image.js new file mode 100644 index 00000000..727a4687 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/label-start-image.js @@ -0,0 +1,56 @@ +'use strict' + +var labelEnd = require('./label-end.js') +var assert = require('assert') +var codes = require('../character/codes.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var labelStartImage = { + name: 'labelStartImage', + tokenize: tokenizeLabelStartImage, + resolveAll: labelEnd.resolveAll +} + +function tokenizeLabelStartImage(effects, ok, nok) { + var self = this + + return start + + function start(code) { + assert__default['default'](code === codes.exclamationMark, 'expected `!`') + effects.enter(types.labelImage) + effects.enter(types.labelImageMarker) + effects.consume(code) + effects.exit(types.labelImageMarker) + return open + } + + function open(code) { + if (code === codes.leftSquareBracket) { + effects.enter(types.labelMarker) + effects.consume(code) + effects.exit(types.labelMarker) + effects.exit(types.labelImage) + return after + } + + return nok(code) + } + + function after(code) { + /* c8 ignore next */ + return code === codes.caret && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs + ? /* c8 ignore next */ nok(code) + : ok(code) + } +} + +module.exports = labelStartImage diff --git a/node_modules/micromark/lib/tokenize/label-start-image.mjs b/node_modules/micromark/lib/tokenize/label-start-image.mjs new file mode 100644 index 00000000..a5bef6e8 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/label-start-image.mjs @@ -0,0 +1,48 @@ +import labelEnd from './label-end.mjs' + +var labelStartImage = { + name: 'labelStartImage', + tokenize: tokenizeLabelStartImage, + resolveAll: labelEnd.resolveAll +} +export default labelStartImage + +import assert from 'assert' +import codes from '../character/codes.mjs' +import types from '../constant/types.mjs' + +function tokenizeLabelStartImage(effects, ok, nok) { + var self = this + + return start + + function start(code) { + assert(code === codes.exclamationMark, 'expected `!`') + effects.enter(types.labelImage) + effects.enter(types.labelImageMarker) + effects.consume(code) + effects.exit(types.labelImageMarker) + return open + } + + function open(code) { + if (code === codes.leftSquareBracket) { + effects.enter(types.labelMarker) + effects.consume(code) + effects.exit(types.labelMarker) + effects.exit(types.labelImage) + return after + } + + return nok(code) + } + + function after(code) { + /* c8 ignore next */ + return code === codes.caret && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs + ? /* c8 ignore next */ nok(code) + : ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/label-start-link.js b/node_modules/micromark/lib/tokenize/label-start-link.js new file mode 100644 index 00000000..a31a1a3d --- /dev/null +++ b/node_modules/micromark/lib/tokenize/label-start-link.js @@ -0,0 +1,46 @@ +'use strict' + +var labelEnd = require('./label-end.js') +var assert = require('assert') +var codes = require('../character/codes.js') +var types = require('../constant/types.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var labelStartLink = { + name: 'labelStartLink', + tokenize: tokenizeLabelStartLink, + resolveAll: labelEnd.resolveAll +} + +function tokenizeLabelStartLink(effects, ok, nok) { + var self = this + + return start + + function start(code) { + assert__default['default'](code === codes.leftSquareBracket, 'expected `[`') + effects.enter(types.labelLink) + effects.enter(types.labelMarker) + effects.consume(code) + effects.exit(types.labelMarker) + effects.exit(types.labelLink) + return after + } + + function after(code) { + /* c8 ignore next */ + return code === codes.caret && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs + ? /* c8 ignore next */ + nok(code) + : ok(code) + } +} + +module.exports = labelStartLink diff --git a/node_modules/micromark/lib/tokenize/label-start-link.mjs b/node_modules/micromark/lib/tokenize/label-start-link.mjs new file mode 100644 index 00000000..7e92c6d1 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/label-start-link.mjs @@ -0,0 +1,38 @@ +import labelEnd from './label-end.mjs' + +var labelStartLink = { + name: 'labelStartLink', + tokenize: tokenizeLabelStartLink, + resolveAll: labelEnd.resolveAll +} +export default labelStartLink + +import assert from 'assert' +import codes from '../character/codes.mjs' +import types from '../constant/types.mjs' + +function tokenizeLabelStartLink(effects, ok, nok) { + var self = this + + return start + + function start(code) { + assert(code === codes.leftSquareBracket, 'expected `[`') + effects.enter(types.labelLink) + effects.enter(types.labelMarker) + effects.consume(code) + effects.exit(types.labelMarker) + effects.exit(types.labelLink) + return after + } + + function after(code) { + /* c8 ignore next */ + return code === codes.caret && + /* c8 ignore next */ + '_hiddenFootnoteSupport' in self.parser.constructs + ? /* c8 ignore next */ + nok(code) + : ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/line-ending.js b/node_modules/micromark/lib/tokenize/line-ending.js new file mode 100644 index 00000000..e56215c9 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/line-ending.js @@ -0,0 +1,31 @@ +'use strict' + +var assert = require('assert') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var types = require('../constant/types.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var lineEnding = { + name: 'lineEnding', + tokenize: tokenizeLineEnding +} + +function tokenizeLineEnding(effects, ok) { + return start + + function start(code) { + assert__default['default'](markdownLineEnding(code), 'expected eol') + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return factorySpace(effects, ok, types.linePrefix) + } +} + +module.exports = lineEnding diff --git a/node_modules/micromark/lib/tokenize/line-ending.mjs b/node_modules/micromark/lib/tokenize/line-ending.mjs new file mode 100644 index 00000000..63029268 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/line-ending.mjs @@ -0,0 +1,22 @@ +var lineEnding = { + name: 'lineEnding', + tokenize: tokenizeLineEnding +} +export default lineEnding + +import assert from 'assert' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import types from '../constant/types.mjs' +import spaceFactory from './factory-space.mjs' + +function tokenizeLineEnding(effects, ok) { + return start + + function start(code) { + assert(markdownLineEnding(code), 'expected eol') + effects.enter(types.lineEnding) + effects.consume(code) + effects.exit(types.lineEnding) + return spaceFactory(effects, ok, types.linePrefix) + } +} diff --git a/node_modules/micromark/lib/tokenize/list.js b/node_modules/micromark/lib/tokenize/list.js new file mode 100644 index 00000000..44f7615f --- /dev/null +++ b/node_modules/micromark/lib/tokenize/list.js @@ -0,0 +1,219 @@ +'use strict' + +var asciiDigit = require('../character/ascii-digit.js') +var codes = require('../character/codes.js') +var markdownSpace = require('../character/markdown-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var prefixSize = require('../util/prefix-size.js') +var sizeChunks = require('../util/size-chunks.js') +var factorySpace = require('./factory-space.js') +var partialBlankLine = require('./partial-blank-line.js') +var thematicBreak = require('./thematic-break.js') + +var list = { + name: 'list', + tokenize: tokenizeListStart, + continuation: {tokenize: tokenizeListContinuation}, + exit: tokenizeListEnd +} + +var listItemPrefixWhitespaceConstruct = { + tokenize: tokenizeListItemPrefixWhitespace, + partial: true +} +var indentConstruct = {tokenize: tokenizeIndent, partial: true} + +function tokenizeListStart(effects, ok, nok) { + var self = this + var initialSize = prefixSize(self.events, types.linePrefix) + var size = 0 + + return start + + function start(code) { + var kind = + self.containerState.type || + (code === codes.asterisk || code === codes.plusSign || code === codes.dash + ? types.listUnordered + : types.listOrdered) + + if ( + kind === types.listUnordered + ? !self.containerState.marker || code === self.containerState.marker + : asciiDigit(code) + ) { + if (!self.containerState.type) { + self.containerState.type = kind + effects.enter(kind, {_container: true}) + } + + if (kind === types.listUnordered) { + effects.enter(types.listItemPrefix) + return code === codes.asterisk || code === codes.dash + ? 
effects.check(thematicBreak, nok, atMarker)(code) + : atMarker(code) + } + + if (!self.interrupt || code === codes.digit1) { + effects.enter(types.listItemPrefix) + effects.enter(types.listItemValue) + return inside(code) + } + } + + return nok(code) + } + + function inside(code) { + if (asciiDigit(code) && ++size < constants.listItemValueSizeMax) { + effects.consume(code) + return inside + } + + if ( + (!self.interrupt || size < 2) && + (self.containerState.marker + ? code === self.containerState.marker + : code === codes.rightParenthesis || code === codes.dot) + ) { + effects.exit(types.listItemValue) + return atMarker(code) + } + + return nok(code) + } + + function atMarker(code) { + effects.enter(types.listItemMarker) + effects.consume(code) + effects.exit(types.listItemMarker) + self.containerState.marker = self.containerState.marker || code + return effects.check( + partialBlankLine, + // Can’t be empty when interrupting. + self.interrupt ? nok : onBlank, + effects.attempt( + listItemPrefixWhitespaceConstruct, + endOfPrefix, + otherPrefix + ) + ) + } + + function onBlank(code) { + self.containerState.initialBlankLine = true + initialSize++ + return endOfPrefix(code) + } + + function otherPrefix(code) { + if (markdownSpace(code)) { + effects.enter(types.listItemPrefixWhitespace) + effects.consume(code) + effects.exit(types.listItemPrefixWhitespace) + return endOfPrefix + } + + return nok(code) + } + + function endOfPrefix(code) { + self.containerState.size = + initialSize + + sizeChunks(self.sliceStream(effects.exit(types.listItemPrefix))) + return ok(code) + } +} + +function tokenizeListContinuation(effects, ok, nok) { + var self = this + + self.containerState._closeFlow = undefined + + return effects.check(partialBlankLine, onBlank, notBlank) + + function onBlank(code) { + self.containerState.furtherBlankLines = + self.containerState.furtherBlankLines || + self.containerState.initialBlankLine + + // We have a blank line. + // Still, try to consume at most the items size. + return factorySpace( + effects, + ok, + types.listItemIndent, + self.containerState.size + 1 + )(code) + } + + function notBlank(code) { + if (self.containerState.furtherBlankLines || !markdownSpace(code)) { + self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined + return notInCurrentItem(code) + } + + self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined + return effects.attempt(indentConstruct, ok, notInCurrentItem)(code) + } + + function notInCurrentItem(code) { + // While we do continue, we signal that the flow should be closed. + self.containerState._closeFlow = true + // As we’re closing flow, we’re no longer interrupting. + self.interrupt = undefined + return factorySpace( + effects, + effects.attempt(list, ok, nok), + types.linePrefix, + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + )(code) + } +} + +function tokenizeIndent(effects, ok, nok) { + var self = this + + return factorySpace( + effects, + afterPrefix, + types.listItemIndent, + self.containerState.size + 1 + ) + + function afterPrefix(code) { + return prefixSize(self.events, types.listItemIndent) === + self.containerState.size + ? 
ok(code) + : nok(code) + } +} + +function tokenizeListEnd(effects) { + effects.exit(this.containerState.type) +} + +function tokenizeListItemPrefixWhitespace(effects, ok, nok) { + var self = this + + return factorySpace( + effects, + afterPrefix, + types.listItemPrefixWhitespace, + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + 1 + ) + + function afterPrefix(code) { + return markdownSpace(code) || + !prefixSize(self.events, types.listItemPrefixWhitespace) + ? nok(code) + : ok(code) + } +} + +module.exports = list diff --git a/node_modules/micromark/lib/tokenize/list.mjs b/node_modules/micromark/lib/tokenize/list.mjs new file mode 100644 index 00000000..017a6eab --- /dev/null +++ b/node_modules/micromark/lib/tokenize/list.mjs @@ -0,0 +1,216 @@ +var list = { + name: 'list', + tokenize: tokenizeListStart, + continuation: {tokenize: tokenizeListContinuation}, + exit: tokenizeListEnd +} +export default list + +import asciiDigit from '../character/ascii-digit.mjs' +import codes from '../character/codes.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import prefixSize from '../util/prefix-size.mjs' +import sizeChunks from '../util/size-chunks.mjs' +import spaceFactory from './factory-space.mjs' +import blank from './partial-blank-line.mjs' +import thematicBreak from './thematic-break.mjs' + +var listItemPrefixWhitespaceConstruct = { + tokenize: tokenizeListItemPrefixWhitespace, + partial: true +} +var indentConstruct = {tokenize: tokenizeIndent, partial: true} + +function tokenizeListStart(effects, ok, nok) { + var self = this + var initialSize = prefixSize(self.events, types.linePrefix) + var size = 0 + + return start + + function start(code) { + var kind = + self.containerState.type || + (code === codes.asterisk || code === codes.plusSign || code === codes.dash + ? types.listUnordered + : types.listOrdered) + + if ( + kind === types.listUnordered + ? !self.containerState.marker || code === self.containerState.marker + : asciiDigit(code) + ) { + if (!self.containerState.type) { + self.containerState.type = kind + effects.enter(kind, {_container: true}) + } + + if (kind === types.listUnordered) { + effects.enter(types.listItemPrefix) + return code === codes.asterisk || code === codes.dash + ? effects.check(thematicBreak, nok, atMarker)(code) + : atMarker(code) + } + + if (!self.interrupt || code === codes.digit1) { + effects.enter(types.listItemPrefix) + effects.enter(types.listItemValue) + return inside(code) + } + } + + return nok(code) + } + + function inside(code) { + if (asciiDigit(code) && ++size < constants.listItemValueSizeMax) { + effects.consume(code) + return inside + } + + if ( + (!self.interrupt || size < 2) && + (self.containerState.marker + ? code === self.containerState.marker + : code === codes.rightParenthesis || code === codes.dot) + ) { + effects.exit(types.listItemValue) + return atMarker(code) + } + + return nok(code) + } + + function atMarker(code) { + effects.enter(types.listItemMarker) + effects.consume(code) + effects.exit(types.listItemMarker) + self.containerState.marker = self.containerState.marker || code + return effects.check( + blank, + // Can’t be empty when interrupting. + self.interrupt ? 
nok : onBlank, + effects.attempt( + listItemPrefixWhitespaceConstruct, + endOfPrefix, + otherPrefix + ) + ) + } + + function onBlank(code) { + self.containerState.initialBlankLine = true + initialSize++ + return endOfPrefix(code) + } + + function otherPrefix(code) { + if (markdownSpace(code)) { + effects.enter(types.listItemPrefixWhitespace) + effects.consume(code) + effects.exit(types.listItemPrefixWhitespace) + return endOfPrefix + } + + return nok(code) + } + + function endOfPrefix(code) { + self.containerState.size = + initialSize + + sizeChunks(self.sliceStream(effects.exit(types.listItemPrefix))) + return ok(code) + } +} + +function tokenizeListContinuation(effects, ok, nok) { + var self = this + + self.containerState._closeFlow = undefined + + return effects.check(blank, onBlank, notBlank) + + function onBlank(code) { + self.containerState.furtherBlankLines = + self.containerState.furtherBlankLines || + self.containerState.initialBlankLine + + // We have a blank line. + // Still, try to consume at most the items size. + return spaceFactory( + effects, + ok, + types.listItemIndent, + self.containerState.size + 1 + )(code) + } + + function notBlank(code) { + if (self.containerState.furtherBlankLines || !markdownSpace(code)) { + self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined + return notInCurrentItem(code) + } + + self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined + return effects.attempt(indentConstruct, ok, notInCurrentItem)(code) + } + + function notInCurrentItem(code) { + // While we do continue, we signal that the flow should be closed. + self.containerState._closeFlow = true + // As we’re closing flow, we’re no longer interrupting. + self.interrupt = undefined + return spaceFactory( + effects, + effects.attempt(list, ok, nok), + types.linePrefix, + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + )(code) + } +} + +function tokenizeIndent(effects, ok, nok) { + var self = this + + return spaceFactory( + effects, + afterPrefix, + types.listItemIndent, + self.containerState.size + 1 + ) + + function afterPrefix(code) { + return prefixSize(self.events, types.listItemIndent) === + self.containerState.size + ? ok(code) + : nok(code) + } +} + +function tokenizeListEnd(effects) { + effects.exit(this.containerState.type) +} + +function tokenizeListItemPrefixWhitespace(effects, ok, nok) { + var self = this + + return spaceFactory( + effects, + afterPrefix, + types.listItemPrefixWhitespace, + self.parser.constructs.disable.null.indexOf('codeIndented') > -1 + ? undefined + : constants.tabSize + 1 + ) + + function afterPrefix(code) { + return markdownSpace(code) || + !prefixSize(self.events, types.listItemPrefixWhitespace) + ? 
nok(code) + : ok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/partial-blank-line.js b/node_modules/micromark/lib/tokenize/partial-blank-line.js new file mode 100644 index 00000000..073824b3 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/partial-blank-line.js @@ -0,0 +1,21 @@ +'use strict' + +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var types = require('../constant/types.js') +var factorySpace = require('./factory-space.js') + +var partialBlankLine = { + tokenize: tokenizePartialBlankLine, + partial: true +} + +function tokenizePartialBlankLine(effects, ok, nok) { + return factorySpace(effects, afterWhitespace, types.linePrefix) + + function afterWhitespace(code) { + return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code) + } +} + +module.exports = partialBlankLine diff --git a/node_modules/micromark/lib/tokenize/partial-blank-line.mjs b/node_modules/micromark/lib/tokenize/partial-blank-line.mjs new file mode 100644 index 00000000..de856585 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/partial-blank-line.mjs @@ -0,0 +1,18 @@ +var partialBlankLine = { + tokenize: tokenizePartialBlankLine, + partial: true +} +export default partialBlankLine + +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import types from '../constant/types.mjs' +import spaceFactory from './factory-space.mjs' + +function tokenizePartialBlankLine(effects, ok, nok) { + return spaceFactory(effects, afterWhitespace, types.linePrefix) + + function afterWhitespace(code) { + return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/setext-underline.js b/node_modules/micromark/lib/tokenize/setext-underline.js new file mode 100644 index 00000000..9ac1e5c4 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/setext-underline.js @@ -0,0 +1,138 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var types = require('../constant/types.js') +var shallow = require('../util/shallow.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var setextUnderline = { + name: 'setextUnderline', + tokenize: tokenizeSetextUnderline, + resolveTo: resolveToSetextUnderline +} + +function resolveToSetextUnderline(events, context) { + var index = events.length + var content + var text + var definition + var heading + + // Find the opening of the content. + // It’ll always exist: we don’t tokenize if it isn’t there. + while (index--) { + if (events[index][0] === 'enter') { + if (events[index][1].type === types.content) { + content = index + break + } + + if (events[index][1].type === types.paragraph) { + text = index + } + } + // Exit + else { + if (events[index][1].type === types.content) { + // Remove the content end (if needed we’ll add it later) + events.splice(index, 1) + } + + if (!definition && events[index][1].type === types.definition) { + definition = index + } + } + } + + heading = { + type: types.setextHeading, + start: shallow(events[text][1].start), + end: shallow(events[events.length - 1][1].end) + } + + // Change the paragraph to setext heading text. 
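  // Illustrative sketch, not part of the upstream file: for input such as
  //
  //   heading text
  //   ============
  //
  // the events so far describe a paragraph; here that paragraph is retyped
  // as setext heading text, and the whole thing, underline included, ends up
  // inside a single `setextHeading` token.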
+ events[text][1].type = types.setextHeadingText + + // If we have definitions in the content, we’ll keep on having content, + // but we need move it. + if (definition) { + events.splice(text, 0, ['enter', heading, context]) + events.splice(definition + 1, 0, ['exit', events[content][1], context]) + events[content][1].end = shallow(events[definition][1].end) + } else { + events[content][1] = heading + } + + // Add the heading exit at the end. + events.push(['exit', heading, context]) + + return events +} + +function tokenizeSetextUnderline(effects, ok, nok) { + var self = this + var index = self.events.length + var marker + var paragraph + + // Find an opening. + while (index--) { + // Skip enter/exit of line ending, line prefix, and content. + // We can now either have a definition or a paragraph. + if ( + self.events[index][1].type !== types.lineEnding && + self.events[index][1].type !== types.linePrefix && + self.events[index][1].type !== types.content + ) { + paragraph = self.events[index][1].type === types.paragraph + break + } + } + + return start + + function start(code) { + assert__default['default']( + code === codes.dash || code === codes.equalsTo, + 'expected `=` or `-`' + ) + + if (!self.lazy && (self.interrupt || paragraph)) { + effects.enter(types.setextHeadingLine) + effects.enter(types.setextHeadingLineSequence) + marker = code + return closingSequence(code) + } + + return nok(code) + } + + function closingSequence(code) { + if (code === marker) { + effects.consume(code) + return closingSequence + } + + effects.exit(types.setextHeadingLineSequence) + return factorySpace(effects, closingSequenceEnd, types.lineSuffix)(code) + } + + function closingSequenceEnd(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.setextHeadingLine) + return ok(code) + } + + return nok(code) + } +} + +module.exports = setextUnderline diff --git a/node_modules/micromark/lib/tokenize/setext-underline.mjs b/node_modules/micromark/lib/tokenize/setext-underline.mjs new file mode 100644 index 00000000..6724846b --- /dev/null +++ b/node_modules/micromark/lib/tokenize/setext-underline.mjs @@ -0,0 +1,129 @@ +var setextUnderline = { + name: 'setextUnderline', + tokenize: tokenizeSetextUnderline, + resolveTo: resolveToSetextUnderline +} +export default setextUnderline + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import types from '../constant/types.mjs' +import shallow from '../util/shallow.mjs' +import spaceFactory from './factory-space.mjs' + +function resolveToSetextUnderline(events, context) { + var index = events.length + var content + var text + var definition + var heading + + // Find the opening of the content. + // It’ll always exist: we don’t tokenize if it isn’t there. + while (index--) { + if (events[index][0] === 'enter') { + if (events[index][1].type === types.content) { + content = index + break + } + + if (events[index][1].type === types.paragraph) { + text = index + } + } + // Exit + else { + if (events[index][1].type === types.content) { + // Remove the content end (if needed we’ll add it later) + events.splice(index, 1) + } + + if (!definition && events[index][1].type === types.definition) { + definition = index + } + } + } + + heading = { + type: types.setextHeading, + start: shallow(events[text][1].start), + end: shallow(events[events.length - 1][1].end) + } + + // Change the paragraph to setext heading text. 
+ events[text][1].type = types.setextHeadingText + + // If we have definitions in the content, we’ll keep on having content, + // but we need move it. + if (definition) { + events.splice(text, 0, ['enter', heading, context]) + events.splice(definition + 1, 0, ['exit', events[content][1], context]) + events[content][1].end = shallow(events[definition][1].end) + } else { + events[content][1] = heading + } + + // Add the heading exit at the end. + events.push(['exit', heading, context]) + + return events +} + +function tokenizeSetextUnderline(effects, ok, nok) { + var self = this + var index = self.events.length + var marker + var paragraph + + // Find an opening. + while (index--) { + // Skip enter/exit of line ending, line prefix, and content. + // We can now either have a definition or a paragraph. + if ( + self.events[index][1].type !== types.lineEnding && + self.events[index][1].type !== types.linePrefix && + self.events[index][1].type !== types.content + ) { + paragraph = self.events[index][1].type === types.paragraph + break + } + } + + return start + + function start(code) { + assert( + code === codes.dash || code === codes.equalsTo, + 'expected `=` or `-`' + ) + + if (!self.lazy && (self.interrupt || paragraph)) { + effects.enter(types.setextHeadingLine) + effects.enter(types.setextHeadingLineSequence) + marker = code + return closingSequence(code) + } + + return nok(code) + } + + function closingSequence(code) { + if (code === marker) { + effects.consume(code) + return closingSequence + } + + effects.exit(types.setextHeadingLineSequence) + return spaceFactory(effects, closingSequenceEnd, types.lineSuffix)(code) + } + + function closingSequenceEnd(code) { + if (code === codes.eof || markdownLineEnding(code)) { + effects.exit(types.setextHeadingLine) + return ok(code) + } + + return nok(code) + } +} diff --git a/node_modules/micromark/lib/tokenize/thematic-break.js b/node_modules/micromark/lib/tokenize/thematic-break.js new file mode 100644 index 00000000..a927a51c --- /dev/null +++ b/node_modules/micromark/lib/tokenize/thematic-break.js @@ -0,0 +1,74 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var markdownSpace = require('../character/markdown-space.js') +var constants = require('../constant/constants.js') +var types = require('../constant/types.js') +var factorySpace = require('./factory-space.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +var thematicBreak = { + name: 'thematicBreak', + tokenize: tokenizeThematicBreak +} + +function tokenizeThematicBreak(effects, ok, nok) { + var size = 0 + var marker + + return start + + function start(code) { + assert__default['default']( + code === codes.asterisk || + code === codes.dash || + code === codes.underscore, + 'expected `*`, `-`, or `_`' + ) + + effects.enter(types.thematicBreak) + marker = code + return atBreak(code) + } + + function atBreak(code) { + if (code === marker) { + effects.enter(types.thematicBreakSequence) + return sequence(code) + } + + if (markdownSpace(code)) { + return factorySpace(effects, atBreak, types.whitespace)(code) + } + + if ( + size < constants.thematicBreakMarkerCountMin || + (code !== codes.eof && !markdownLineEnding(code)) + ) { + return nok(code) + } + + effects.exit(types.thematicBreak) + return ok(code) + } + + function sequence(code) { + if (code === marker) { + effects.consume(code) + size++ + return sequence + } + + effects.exit(types.thematicBreakSequence) + return atBreak(code) + } +} + +module.exports = thematicBreak diff --git a/node_modules/micromark/lib/tokenize/thematic-break.mjs b/node_modules/micromark/lib/tokenize/thematic-break.mjs new file mode 100644 index 00000000..58c4d782 --- /dev/null +++ b/node_modules/micromark/lib/tokenize/thematic-break.mjs @@ -0,0 +1,65 @@ +var thematicBreak = { + name: 'thematicBreak', + tokenize: tokenizeThematicBreak +} +export default thematicBreak + +import assert from 'assert' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import markdownSpace from '../character/markdown-space.mjs' +import constants from '../constant/constants.mjs' +import types from '../constant/types.mjs' +import spaceFactory from './factory-space.mjs' + +function tokenizeThematicBreak(effects, ok, nok) { + var size = 0 + var marker + + return start + + function start(code) { + assert( + code === codes.asterisk || + code === codes.dash || + code === codes.underscore, + 'expected `*`, `-`, or `_`' + ) + + effects.enter(types.thematicBreak) + marker = code + return atBreak(code) + } + + function atBreak(code) { + if (code === marker) { + effects.enter(types.thematicBreakSequence) + return sequence(code) + } + + if (markdownSpace(code)) { + return spaceFactory(effects, atBreak, types.whitespace)(code) + } + + if ( + size < constants.thematicBreakMarkerCountMin || + (code !== codes.eof && !markdownLineEnding(code)) + ) { + return nok(code) + } + + effects.exit(types.thematicBreak) + return ok(code) + } + + function sequence(code) { + if (code === marker) { + effects.consume(code) + size++ + return sequence + } + + effects.exit(types.thematicBreakSequence) + return atBreak(code) + } +} diff --git a/node_modules/micromark/lib/util/chunked-push.js b/node_modules/micromark/lib/util/chunked-push.js new file mode 100644 index 00000000..77689779 --- /dev/null +++ b/node_modules/micromark/lib/util/chunked-push.js @@ -0,0 +1,14 @@ +'use strict' + +var chunkedSplice = require('./chunked-splice.js') + +function chunkedPush(list, items) { + if (list.length) { + chunkedSplice(list, list.length, 0, items) + return list + } + + return items +} + +module.exports = chunkedPush diff --git a/node_modules/micromark/lib/util/chunked-push.mjs b/node_modules/micromark/lib/util/chunked-push.mjs new file mode 100644 index 00000000..3c84d8b7 --- /dev/null +++ 
b/node_modules/micromark/lib/util/chunked-push.mjs @@ -0,0 +1,12 @@ +export default chunkedPush + +import chunkedSplice from './chunked-splice.mjs' + +function chunkedPush(list, items) { + if (list.length) { + chunkedSplice(list, list.length, 0, items) + return list + } + + return items +} diff --git a/node_modules/micromark/lib/util/chunked-splice.js b/node_modules/micromark/lib/util/chunked-splice.js new file mode 100644 index 00000000..5a3246d8 --- /dev/null +++ b/node_modules/micromark/lib/util/chunked-splice.js @@ -0,0 +1,46 @@ +'use strict' + +var constants = require('../constant/constants.js') +var splice = require('../constant/splice.js') + +// `Array#splice` takes all items to be inserted as individual argument which +// causes a stack overflow in V8 when trying to insert 100k items for instance. +function chunkedSplice(list, start, remove, items) { + var end = list.length + var chunkStart = 0 + var parameters + + // Make start between zero and `end` (included). + if (start < 0) { + start = -start > end ? 0 : end + start + } else { + start = start > end ? end : start + } + + remove = remove > 0 ? remove : 0 + + // No need to chunk the items if there’s only a couple (10k) items. + if (items.length < constants.v8MaxSafeChunkSize) { + parameters = Array.from(items) + parameters.unshift(start, remove) + splice.apply(list, parameters) + } else { + // Delete `remove` items starting from `start` + if (remove) splice.apply(list, [start, remove]) + + // Insert the items in chunks to not cause stack overflows. + while (chunkStart < items.length) { + parameters = items.slice( + chunkStart, + chunkStart + constants.v8MaxSafeChunkSize + ) + parameters.unshift(start, 0) + splice.apply(list, parameters) + + chunkStart += constants.v8MaxSafeChunkSize + start += constants.v8MaxSafeChunkSize + } + } +} + +module.exports = chunkedSplice diff --git a/node_modules/micromark/lib/util/chunked-splice.mjs b/node_modules/micromark/lib/util/chunked-splice.mjs new file mode 100644 index 00000000..0bda9533 --- /dev/null +++ b/node_modules/micromark/lib/util/chunked-splice.mjs @@ -0,0 +1,44 @@ +export default chunkedSplice + +import constants from '../constant/constants.mjs' +import splice from '../constant/splice.mjs' + +// `Array#splice` takes all items to be inserted as individual argument which +// causes a stack overflow in V8 when trying to insert 100k items for instance. +function chunkedSplice(list, start, remove, items) { + var end = list.length + var chunkStart = 0 + var parameters + + // Make start between zero and `end` (included). + if (start < 0) { + start = -start > end ? 0 : end + start + } else { + start = start > end ? end : start + } + + remove = remove > 0 ? remove : 0 + + // No need to chunk the items if there’s only a couple (10k) items. + if (items.length < constants.v8MaxSafeChunkSize) { + parameters = Array.from(items) + parameters.unshift(start, remove) + splice.apply(list, parameters) + } else { + // Delete `remove` items starting from `start` + if (remove) splice.apply(list, [start, remove]) + + // Insert the items in chunks to not cause stack overflows. 
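  // Each pass hands `splice.apply` at most `v8MaxSafeChunkSize` items, so the
  // engine's argument limit is never exceeded. A rough usage sketch,
  // illustrative only and assuming the constant is 10000:
  //
  //   var target = ['a', 'z']
  //   chunkedSplice(target, 1, 0, new Array(100000).fill('x'))
  //   // target now has 100002 items, still starting with 'a' and ending with 'z'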
+ while (chunkStart < items.length) { + parameters = items.slice( + chunkStart, + chunkStart + constants.v8MaxSafeChunkSize + ) + parameters.unshift(start, 0) + splice.apply(list, parameters) + + chunkStart += constants.v8MaxSafeChunkSize + start += constants.v8MaxSafeChunkSize + } + } +} diff --git a/node_modules/micromark/lib/util/classify-character.js b/node_modules/micromark/lib/util/classify-character.js new file mode 100644 index 00000000..3c73c41f --- /dev/null +++ b/node_modules/micromark/lib/util/classify-character.js @@ -0,0 +1,27 @@ +'use strict' + +var codes = require('../character/codes.js') +var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js') +var unicodePunctuation = require('../character/unicode-punctuation.js') +var unicodeWhitespace = require('../character/unicode-whitespace.js') +var constants = require('../constant/constants.js') + +// Classify whether a character is unicode whitespace, unicode punctuation, or +// anything else. +// Used for attention (emphasis, strong), whose sequences can open or close +// based on the class of surrounding characters. +function classifyCharacter(code) { + if ( + code === codes.eof || + markdownLineEndingOrSpace(code) || + unicodeWhitespace(code) + ) { + return constants.characterGroupWhitespace + } + + if (unicodePunctuation(code)) { + return constants.characterGroupPunctuation + } +} + +module.exports = classifyCharacter diff --git a/node_modules/micromark/lib/util/classify-character.mjs b/node_modules/micromark/lib/util/classify-character.mjs new file mode 100644 index 00000000..f701c8e0 --- /dev/null +++ b/node_modules/micromark/lib/util/classify-character.mjs @@ -0,0 +1,25 @@ +export default classifyCharacter + +import codes from '../character/codes.mjs' +import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs' +import unicodePunctuation from '../character/unicode-punctuation.mjs' +import unicodeWhitespace from '../character/unicode-whitespace.mjs' +import constants from '../constant/constants.mjs' + +// Classify whether a character is unicode whitespace, unicode punctuation, or +// anything else. +// Used for attention (emphasis, strong), whose sequences can open or close +// based on the class of surrounding characters. +function classifyCharacter(code) { + if ( + code === codes.eof || + markdownLineEndingOrSpace(code) || + unicodeWhitespace(code) + ) { + return constants.characterGroupWhitespace + } + + if (unicodePunctuation(code)) { + return constants.characterGroupPunctuation + } +} diff --git a/node_modules/micromark/lib/util/combine-extensions.js b/node_modules/micromark/lib/util/combine-extensions.js new file mode 100644 index 00000000..830ec3bf --- /dev/null +++ b/node_modules/micromark/lib/util/combine-extensions.js @@ -0,0 +1,50 @@ +'use strict' + +var hasOwnProperty = require('../constant/has-own-property.js') +var chunkedSplice = require('./chunked-splice.js') +var miniflat = require('./miniflat.js') + +// Combine several syntax extensions into one. +function combineExtensions(extensions) { + var all = {} + var index = -1 + + while (++index < extensions.length) { + extension(all, extensions[index]) + } + + return all +} + +function extension(all, extension) { + var hook + var left + var right + var code + + for (hook in extension) { + left = hasOwnProperty.call(all, hook) ? all[hook] : (all[hook] = {}) + right = extension[hook] + + for (code in right) { + left[code] = constructs( + miniflat(right[code]), + hasOwnProperty.call(left, code) ? 
left[code] : [] + ) + } + } +} + +function constructs(list, existing) { + var index = -1 + var before = [] + + while (++index < list.length) { + ;(list[index].add === 'after' ? existing : before).push(list[index]) + } + + chunkedSplice(existing, 0, 0, before) + return existing +} + +module.exports = combineExtensions diff --git a/node_modules/micromark/lib/util/combine-extensions.mjs b/node_modules/micromark/lib/util/combine-extensions.mjs new file mode 100644 index 00000000..605652a8 --- /dev/null +++ b/node_modules/micromark/lib/util/combine-extensions.mjs @@ -0,0 +1,48 @@ +export default combineExtensions + +import own from '../constant/has-own-property.mjs' +import chunkedSplice from './chunked-splice.mjs' +import miniflat from './miniflat.mjs' + +// Combine several syntax extensions into one. +function combineExtensions(extensions) { + var all = {} + var index = -1 + + while (++index < extensions.length) { + extension(all, extensions[index]) + } + + return all +} + +function extension(all, extension) { + var hook + var left + var right + var code + + for (hook in extension) { + left = own.call(all, hook) ? all[hook] : (all[hook] = {}) + right = extension[hook] + + for (code in right) { + left[code] = constructs( + miniflat(right[code]), + own.call(left, code) ? left[code] : [] + ) + } + } +} + +function constructs(list, existing) { + var index = -1 + var before = [] + + while (++index < list.length) { + ;(list[index].add === 'after' ? existing : before).push(list[index]) + } + + chunkedSplice(existing, 0, 0, before) + return existing +} diff --git a/node_modules/micromark/lib/util/combine-html-extensions.js b/node_modules/micromark/lib/util/combine-html-extensions.js new file mode 100644 index 00000000..c4fdadaf --- /dev/null +++ b/node_modules/micromark/lib/util/combine-html-extensions.js @@ -0,0 +1,35 @@ +'use strict' + +var hasOwnProperty = require('../constant/has-own-property.js') + +// Combine several HTML extensions into one. +function combineHtmlExtensions(extensions) { + var handlers = {} + var index = -1 + + while (++index < extensions.length) { + extension(handlers, extensions[index]) + } + + return handlers +} + +function extension(handlers, extension) { + var hook + var left + var right + var type + + for (hook in extension) { + left = hasOwnProperty.call(handlers, hook) + ? handlers[hook] + : (handlers[hook] = {}) + right = extension[hook] + + for (type in right) { + left[type] = right[type] + } + } +} + +module.exports = combineHtmlExtensions diff --git a/node_modules/micromark/lib/util/combine-html-extensions.mjs b/node_modules/micromark/lib/util/combine-html-extensions.mjs new file mode 100644 index 00000000..d7e54e75 --- /dev/null +++ b/node_modules/micromark/lib/util/combine-html-extensions.mjs @@ -0,0 +1,31 @@ +export default combineHtmlExtensions + +import own from '../constant/has-own-property.mjs' + +// Combine several HTML extensions into one. +function combineHtmlExtensions(extensions) { + var handlers = {} + var index = -1 + + while (++index < extensions.length) { + extension(handlers, extensions[index]) + } + + return handlers +} + +function extension(handlers, extension) { + var hook + var left + var right + var type + + for (hook in extension) { + left = own.call(handlers, hook) ? 
handlers[hook] : (handlers[hook] = {}) + right = extension[hook] + + for (type in right) { + left[type] = right[type] + } + } +} diff --git a/node_modules/micromark/lib/util/create-tokenizer.js b/node_modules/micromark/lib/util/create-tokenizer.js new file mode 100644 index 00000000..dac89014 --- /dev/null +++ b/node_modules/micromark/lib/util/create-tokenizer.js @@ -0,0 +1,440 @@ +'use strict' + +var assert = require('assert') +var createDebug = require('debug') +var assign = require('../constant/assign.js') +var codes = require('../character/codes.js') +var markdownLineEnding = require('../character/markdown-line-ending.js') +var chunkedPush = require('./chunked-push.js') +var chunkedSplice = require('./chunked-splice.js') +var miniflat = require('./miniflat.js') +var resolveAll = require('./resolve-all.js') +var serializeChunks = require('./serialize-chunks.js') +var shallow = require('./shallow.js') +var sliceChunks = require('./slice-chunks.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) +var createDebug__default = /*#__PURE__*/ _interopDefaultLegacy(createDebug) + +var debug = createDebug__default['default']('micromark') + +// Create a tokenizer. +// Tokenizers deal with one type of data (e.g., containers, flow, text). +// The parser is the object dealing with it all. +// `initialize` works like other constructs, except that only its `tokenize` +// function is used, in which case it doesn’t receive an `ok` or `nok`. +// `from` can be given to set the point before the first character, although +// when further lines are indented, they must be set with `defineSkip`. +function createTokenizer(parser, initialize, from) { + var point = from ? shallow(from) : {line: 1, column: 1, offset: 0} + var columnStart = {} + var resolveAllConstructs = [] + var chunks = [] + var stack = [] + var consumed = true + + // Tools used for tokenizing. + var effects = { + consume: consume, + enter: enter, + exit: exit, + attempt: constructFactory(onsuccessfulconstruct), + check: constructFactory(onsuccessfulcheck), + interrupt: constructFactory(onsuccessfulcheck, {interrupt: true}), + lazy: constructFactory(onsuccessfulcheck, {lazy: true}) + } + + // State and tools for resolving and serializing. + var context = { + previous: codes.eof, + events: [], + parser: parser, + sliceStream: sliceStream, + sliceSerialize: sliceSerialize, + now: now, + defineSkip: skip, + write: write + } + + // The state function. + var state = initialize.tokenize.call(context, effects) + + // Track which character we expect to be consumed, to catch bugs. + var expectedCode + + if (initialize.resolveAll) { + resolveAllConstructs.push(initialize) + } + + // Store where we are in the input stream. + point._index = 0 + point._bufferIndex = -1 + + return context + + function write(slice) { + chunks = chunkedPush(chunks, slice) + + main() + + // Exit if we’re not done, resolve might change stuff. + if (chunks[chunks.length - 1] !== codes.eof) { + return [] + } + + addResult(initialize, 0) + + // Otherwise, resolve, and exit. + context.events = resolveAll(resolveAllConstructs, context.events, context) + + return context.events + } + + // + // Tools. 
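  // (`sliceSerialize` turns a token back into the source text it spans,
  // `sliceStream` yields the underlying chunks, `now` copies the current
  // point, and `defineSkip` records a column so positions jump past container
  // indentation on that line.)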
+ // + + function sliceSerialize(token) { + return serializeChunks(sliceStream(token)) + } + + function sliceStream(token) { + return sliceChunks(chunks, token) + } + + function now() { + return shallow(point) + } + + function skip(value) { + columnStart[value.line] = value.column + accountForPotentialSkip() + debug('position: define skip: `%j`', point) + } + + // + // State management. + // + + // Main loop (note that `_index` and `_bufferIndex` in `point` are modified by + // `consume`). + // Here is where we walk through the chunks, which either include strings of + // several characters, or numerical character codes. + // The reason to do this in a loop instead of a call is so the stack can + // drain. + function main() { + var chunkIndex + var chunk + + while (point._index < chunks.length) { + chunk = chunks[point._index] + + // If we’re in a buffer chunk, loop through it. + if (typeof chunk === 'string') { + chunkIndex = point._index + + if (point._bufferIndex < 0) { + point._bufferIndex = 0 + } + + while ( + point._index === chunkIndex && + point._bufferIndex < chunk.length + ) { + go(chunk.charCodeAt(point._bufferIndex)) + } + } else { + go(chunk) + } + } + } + + // Deal with one code. + function go(code) { + assert__default['default'].equal( + consumed, + true, + 'expected character to be consumed' + ) + consumed = undefined + debug('main: passing `%s` to %s', code, state.name) + expectedCode = code + state = state(code) + } + + // Move a character forward. + function consume(code) { + assert__default['default'].equal( + code, + expectedCode, + 'expected given code to equal expected code' + ) + + debug('consume: `%s`', code) + + assert__default['default'].equal( + consumed, + undefined, + 'expected code to not have been consumed' + ) + assert__default['default']( + code === null + ? !context.events.length || + context.events[context.events.length - 1][0] === 'exit' + : context.events[context.events.length - 1][0] === 'enter', + 'expected last token to be open' + ) + + if (markdownLineEnding(code)) { + point.line++ + point.column = 1 + point.offset += code === codes.carriageReturnLineFeed ? 2 : 1 + accountForPotentialSkip() + debug('position: after eol: `%j`', point) + } else if (code !== codes.virtualSpace) { + point.column++ + point.offset++ + } + + // Not in a string chunk. + if (point._bufferIndex < 0) { + point._index++ + } else { + point._bufferIndex++ + + // At end of string chunk. + if (point._bufferIndex === chunks[point._index].length) { + point._bufferIndex = -1 + point._index++ + } + } + + // Expose the previous character. + context.previous = code + + // Mark as consumed. + consumed = true + } + + // Start a token. + function enter(type, fields) { + var token = fields || {} + token.type = type + token.start = now() + + assert__default['default'].equal( + typeof type, + 'string', + 'expected string type' + ) + assert__default['default'].notEqual( + type.length, + 0, + 'expected non-empty string' + ) + debug('enter: `%s`', type) + + context.events.push(['enter', token, context]) + + stack.push(token) + + return token + } + + // Stop a token. 
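  // `exit` must name the same type as the most recent unclosed `enter`, and
  // the token must span at least one consumed character. A minimal sketch of
  // how a construct drives this pair (illustrative only):
  //
  //   effects.enter(types.lineEnding)
  //   effects.consume(code)
  //   effects.exit(types.lineEnding)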
+ function exit(type) { + assert__default['default'].equal( + typeof type, + 'string', + 'expected string type' + ) + assert__default['default'].notEqual( + type.length, + 0, + 'expected non-empty string' + ) + assert__default['default'].notEqual( + stack.length, + 0, + 'cannot close w/o open tokens' + ) + + var token = stack.pop() + token.end = now() + + assert__default['default'].equal( + type, + token.type, + 'expected exit token to match current token' + ) + + assert__default['default']( + !( + token.start._index === token.end._index && + token.start._bufferIndex === token.end._bufferIndex + ), + 'expected non-empty token (`' + type + '`)' + ) + + debug('exit: `%s`', token.type) + context.events.push(['exit', token, context]) + + return token + } + + // Use results. + function onsuccessfulconstruct(construct, info) { + addResult(construct, info.from) + } + + // Discard results. + function onsuccessfulcheck(construct, info) { + info.restore() + } + + // Factory to attempt/check/interrupt. + function constructFactory(onreturn, fields) { + return hook + + // Handle either an object mapping codes to constructs, a list of + // constructs, or a single construct. + function hook(constructs, returnState, bogusState) { + var listOfConstructs + var constructIndex + var currentConstruct + var info + + return constructs.tokenize || 'length' in constructs + ? handleListOfConstructs(miniflat(constructs)) + : handleMapOfConstructs + + function handleMapOfConstructs(code) { + if (code in constructs || codes.eof in constructs) { + return handleListOfConstructs( + constructs.null + ? /* c8 ignore next */ + miniflat(constructs[code]).concat(miniflat(constructs.null)) + : constructs[code] + )(code) + } + + return bogusState(code) + } + + function handleListOfConstructs(list) { + listOfConstructs = list + constructIndex = 0 + return handleConstruct(list[constructIndex]) + } + + function handleConstruct(construct) { + return start + + function start(code) { + // To do: not nede to store if there is no bogus state, probably? + // Currently doesn’t work because `inspect` in document does a check + // w/o a bogus, which doesn’t make sense. But it does seem to help perf + // by not storing. + info = store() + currentConstruct = construct + + if (!construct.partial) { + context.currentConstruct = construct + } + + if ( + construct.name && + context.parser.constructs.disable.null.indexOf(construct.name) > -1 + ) { + return nok(code) + } + + return construct.tokenize.call( + fields ? 
assign({}, context, fields) : context, + effects, + ok, + nok + )(code) + } + } + + function ok(code) { + assert__default['default'].equal(code, expectedCode, 'expected code') + consumed = true + onreturn(currentConstruct, info) + return returnState + } + + function nok(code) { + assert__default['default'].equal(code, expectedCode, 'expected code') + consumed = true + info.restore() + + if (++constructIndex < listOfConstructs.length) { + return handleConstruct(listOfConstructs[constructIndex]) + } + + return bogusState + } + } + } + + function addResult(construct, from) { + if (construct.resolveAll && resolveAllConstructs.indexOf(construct) < 0) { + resolveAllConstructs.push(construct) + } + + if (construct.resolve) { + chunkedSplice( + context.events, + from, + context.events.length - from, + construct.resolve(context.events.slice(from), context) + ) + } + + if (construct.resolveTo) { + context.events = construct.resolveTo(context.events, context) + } + + assert__default['default']( + construct.partial || + !context.events.length || + context.events[context.events.length - 1][0] === 'exit', + 'expected last token to end' + ) + } + + function store() { + var startPoint = now() + var startPrevious = context.previous + var startCurrentConstruct = context.currentConstruct + var startEventsIndex = context.events.length + var startStack = Array.from(stack) + + return {restore: restore, from: startEventsIndex} + + function restore() { + point = startPoint + context.previous = startPrevious + context.currentConstruct = startCurrentConstruct + context.events.length = startEventsIndex + stack = startStack + accountForPotentialSkip() + debug('position: restore: `%j`', point) + } + } + + function accountForPotentialSkip() { + if (point.line in columnStart && point.column < 2) { + point.column = columnStart[point.line] + point.offset += columnStart[point.line] - 1 + } + } +} + +module.exports = createTokenizer diff --git a/node_modules/micromark/lib/util/create-tokenizer.mjs b/node_modules/micromark/lib/util/create-tokenizer.mjs new file mode 100644 index 00000000..6e8808ea --- /dev/null +++ b/node_modules/micromark/lib/util/create-tokenizer.mjs @@ -0,0 +1,399 @@ +export default createTokenizer + +import assert from 'assert' +import createDebug from 'debug' +import assign from '../constant/assign.mjs' +import codes from '../character/codes.mjs' +import markdownLineEnding from '../character/markdown-line-ending.mjs' +import chunkedPush from './chunked-push.mjs' +import chunkedSplice from './chunked-splice.mjs' +import miniflat from './miniflat.mjs' +import resolveAll from './resolve-all.mjs' +import serializeChunks from './serialize-chunks.mjs' +import shallow from './shallow.mjs' +import sliceChunks from './slice-chunks.mjs' + +var debug = createDebug('micromark') + +// Create a tokenizer. +// Tokenizers deal with one type of data (e.g., containers, flow, text). +// The parser is the object dealing with it all. +// `initialize` works like other constructs, except that only its `tokenize` +// function is used, in which case it doesn’t receive an `ok` or `nok`. +// `from` can be given to set the point before the first character, although +// when further lines are indented, they must be set with `defineSkip`. +function createTokenizer(parser, initialize, from) { + var point = from ? shallow(from) : {line: 1, column: 1, offset: 0} + var columnStart = {} + var resolveAllConstructs = [] + var chunks = [] + var stack = [] + var consumed = true + + // Tools used for tokenizing. 
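  // `attempt` runs a construct and keeps its events on success, `check` runs
  // one and then always rewinds (only the branch taken tells the caller
  // whether it matched), and `interrupt`/`lazy` behave like `check` with the
  // corresponding flag set on the context handed to the construct.
  // Typical use inside a construct, with placeholder names (illustrative):
  //
  //   return effects.attempt(someConstruct, whenOk, whenNok)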
+ var effects = { + consume: consume, + enter: enter, + exit: exit, + attempt: constructFactory(onsuccessfulconstruct), + check: constructFactory(onsuccessfulcheck), + interrupt: constructFactory(onsuccessfulcheck, {interrupt: true}), + lazy: constructFactory(onsuccessfulcheck, {lazy: true}) + } + + // State and tools for resolving and serializing. + var context = { + previous: codes.eof, + events: [], + parser: parser, + sliceStream: sliceStream, + sliceSerialize: sliceSerialize, + now: now, + defineSkip: skip, + write: write + } + + // The state function. + var state = initialize.tokenize.call(context, effects) + + // Track which character we expect to be consumed, to catch bugs. + var expectedCode + + if (initialize.resolveAll) { + resolveAllConstructs.push(initialize) + } + + // Store where we are in the input stream. + point._index = 0 + point._bufferIndex = -1 + + return context + + function write(slice) { + chunks = chunkedPush(chunks, slice) + + main() + + // Exit if we’re not done, resolve might change stuff. + if (chunks[chunks.length - 1] !== codes.eof) { + return [] + } + + addResult(initialize, 0) + + // Otherwise, resolve, and exit. + context.events = resolveAll(resolveAllConstructs, context.events, context) + + return context.events + } + + // + // Tools. + // + + function sliceSerialize(token) { + return serializeChunks(sliceStream(token)) + } + + function sliceStream(token) { + return sliceChunks(chunks, token) + } + + function now() { + return shallow(point) + } + + function skip(value) { + columnStart[value.line] = value.column + accountForPotentialSkip() + debug('position: define skip: `%j`', point) + } + + // + // State management. + // + + // Main loop (note that `_index` and `_bufferIndex` in `point` are modified by + // `consume`). + // Here is where we walk through the chunks, which either include strings of + // several characters, or numerical character codes. + // The reason to do this in a loop instead of a call is so the stack can + // drain. + function main() { + var chunkIndex + var chunk + + while (point._index < chunks.length) { + chunk = chunks[point._index] + + // If we’re in a buffer chunk, loop through it. + if (typeof chunk === 'string') { + chunkIndex = point._index + + if (point._bufferIndex < 0) { + point._bufferIndex = 0 + } + + while ( + point._index === chunkIndex && + point._bufferIndex < chunk.length + ) { + go(chunk.charCodeAt(point._bufferIndex)) + } + } else { + go(chunk) + } + } + } + + // Deal with one code. + function go(code) { + assert.equal(consumed, true, 'expected character to be consumed') + consumed = undefined + debug('main: passing `%s` to %s', code, state.name) + expectedCode = code + state = state(code) + } + + // Move a character forward. + function consume(code) { + assert.equal( + code, + expectedCode, + 'expected given code to equal expected code' + ) + + debug('consume: `%s`', code) + + assert.equal(consumed, undefined, 'expected code to not have been consumed') + assert( + code === null + ? !context.events.length || + context.events[context.events.length - 1][0] === 'exit' + : context.events[context.events.length - 1][0] === 'enter', + 'expected last token to be open' + ) + + if (markdownLineEnding(code)) { + point.line++ + point.column = 1 + point.offset += code === codes.carriageReturnLineFeed ? 2 : 1 + accountForPotentialSkip() + debug('position: after eol: `%j`', point) + } else if (code !== codes.virtualSpace) { + point.column++ + point.offset++ + } + + // Not in a string chunk. 
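  // `_index` points at the current chunk; `_bufferIndex` is the offset inside
  // that chunk when it is a string, and `-1` while sitting on a numerical
  // code chunk (such as a line ending), which is consumed as a whole.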
+ if (point._bufferIndex < 0) { + point._index++ + } else { + point._bufferIndex++ + + // At end of string chunk. + if (point._bufferIndex === chunks[point._index].length) { + point._bufferIndex = -1 + point._index++ + } + } + + // Expose the previous character. + context.previous = code + + // Mark as consumed. + consumed = true + } + + // Start a token. + function enter(type, fields) { + var token = fields || {} + token.type = type + token.start = now() + + assert.equal(typeof type, 'string', 'expected string type') + assert.notEqual(type.length, 0, 'expected non-empty string') + debug('enter: `%s`', type) + + context.events.push(['enter', token, context]) + + stack.push(token) + + return token + } + + // Stop a token. + function exit(type) { + assert.equal(typeof type, 'string', 'expected string type') + assert.notEqual(type.length, 0, 'expected non-empty string') + assert.notEqual(stack.length, 0, 'cannot close w/o open tokens') + + var token = stack.pop() + token.end = now() + + assert.equal(type, token.type, 'expected exit token to match current token') + + assert( + !( + token.start._index === token.end._index && + token.start._bufferIndex === token.end._bufferIndex + ), + 'expected non-empty token (`' + type + '`)' + ) + + debug('exit: `%s`', token.type) + context.events.push(['exit', token, context]) + + return token + } + + // Use results. + function onsuccessfulconstruct(construct, info) { + addResult(construct, info.from) + } + + // Discard results. + function onsuccessfulcheck(construct, info) { + info.restore() + } + + // Factory to attempt/check/interrupt. + function constructFactory(onreturn, fields) { + return hook + + // Handle either an object mapping codes to constructs, a list of + // constructs, or a single construct. + function hook(constructs, returnState, bogusState) { + var listOfConstructs + var constructIndex + var currentConstruct + var info + + return constructs.tokenize || 'length' in constructs + ? handleListOfConstructs(miniflat(constructs)) + : handleMapOfConstructs + + function handleMapOfConstructs(code) { + if (code in constructs || codes.eof in constructs) { + return handleListOfConstructs( + constructs.null + ? /* c8 ignore next */ + miniflat(constructs[code]).concat(miniflat(constructs.null)) + : constructs[code] + )(code) + } + + return bogusState(code) + } + + function handleListOfConstructs(list) { + listOfConstructs = list + constructIndex = 0 + return handleConstruct(list[constructIndex]) + } + + function handleConstruct(construct) { + return start + + function start(code) { + // To do: not nede to store if there is no bogus state, probably? + // Currently doesn’t work because `inspect` in document does a check + // w/o a bogus, which doesn’t make sense. But it does seem to help perf + // by not storing. + info = store() + currentConstruct = construct + + if (!construct.partial) { + context.currentConstruct = construct + } + + if ( + construct.name && + context.parser.constructs.disable.null.indexOf(construct.name) > -1 + ) { + return nok(code) + } + + return construct.tokenize.call( + fields ? 
assign({}, context, fields) : context, + effects, + ok, + nok + )(code) + } + } + + function ok(code) { + assert.equal(code, expectedCode, 'expected code') + consumed = true + onreturn(currentConstruct, info) + return returnState + } + + function nok(code) { + assert.equal(code, expectedCode, 'expected code') + consumed = true + info.restore() + + if (++constructIndex < listOfConstructs.length) { + return handleConstruct(listOfConstructs[constructIndex]) + } + + return bogusState + } + } + } + + function addResult(construct, from) { + if (construct.resolveAll && resolveAllConstructs.indexOf(construct) < 0) { + resolveAllConstructs.push(construct) + } + + if (construct.resolve) { + chunkedSplice( + context.events, + from, + context.events.length - from, + construct.resolve(context.events.slice(from), context) + ) + } + + if (construct.resolveTo) { + context.events = construct.resolveTo(context.events, context) + } + + assert( + construct.partial || + !context.events.length || + context.events[context.events.length - 1][0] === 'exit', + 'expected last token to end' + ) + } + + function store() { + var startPoint = now() + var startPrevious = context.previous + var startCurrentConstruct = context.currentConstruct + var startEventsIndex = context.events.length + var startStack = Array.from(stack) + + return {restore: restore, from: startEventsIndex} + + function restore() { + point = startPoint + context.previous = startPrevious + context.currentConstruct = startCurrentConstruct + context.events.length = startEventsIndex + stack = startStack + accountForPotentialSkip() + debug('position: restore: `%j`', point) + } + } + + function accountForPotentialSkip() { + if (point.line in columnStart && point.column < 2) { + point.column = columnStart[point.line] + point.offset += columnStart[point.line] - 1 + } + } +} diff --git a/node_modules/micromark/lib/util/miniflat.js b/node_modules/micromark/lib/util/miniflat.js new file mode 100644 index 00000000..39c5dd4f --- /dev/null +++ b/node_modules/micromark/lib/util/miniflat.js @@ -0,0 +1,11 @@ +'use strict' + +function miniflat(value) { + return value === null || value === undefined + ? [] + : 'length' in value + ? value + : [value] +} + +module.exports = miniflat diff --git a/node_modules/micromark/lib/util/miniflat.mjs b/node_modules/micromark/lib/util/miniflat.mjs new file mode 100644 index 00000000..7fad196c --- /dev/null +++ b/node_modules/micromark/lib/util/miniflat.mjs @@ -0,0 +1,9 @@ +export default miniflat + +function miniflat(value) { + return value === null || value === undefined + ? [] + : 'length' in value + ? value + : [value] +} diff --git a/node_modules/micromark/lib/util/move-point.js b/node_modules/micromark/lib/util/move-point.js new file mode 100644 index 00000000..830807fb --- /dev/null +++ b/node_modules/micromark/lib/util/move-point.js @@ -0,0 +1,12 @@ +'use strict' + +// Note! `move` only works inside lines! It’s not possible to move past other +// chunks (replacement characters, tabs, or line endings). +function movePoint(point, offset) { + point.column += offset + point.offset += offset + point._bufferIndex += offset + return point +} + +module.exports = movePoint diff --git a/node_modules/micromark/lib/util/move-point.mjs b/node_modules/micromark/lib/util/move-point.mjs new file mode 100644 index 00000000..8192df49 --- /dev/null +++ b/node_modules/micromark/lib/util/move-point.mjs @@ -0,0 +1,10 @@ +export default movePoint + +// Note! `move` only works inside lines! 
It’s not possible to move past other +// chunks (replacement characters, tabs, or line endings). +function movePoint(point, offset) { + point.column += offset + point.offset += offset + point._bufferIndex += offset + return point +} diff --git a/node_modules/micromark/lib/util/normalize-identifier.js b/node_modules/micromark/lib/util/normalize-identifier.js new file mode 100644 index 00000000..0d9d7c0e --- /dev/null +++ b/node_modules/micromark/lib/util/normalize-identifier.js @@ -0,0 +1,23 @@ +'use strict' + +var values = require('../character/values.js') + +function normalizeIdentifier(value) { + return ( + value + // Collapse Markdown whitespace. + .replace(/[\t\n\r ]+/g, values.space) + // Trim. + .replace(/^ | $/g, '') + // Some characters are considered “uppercase”, but if their lowercase + // counterpart is uppercased will result in a different uppercase + // character. + // Hence, to get that form, we perform both lower- and uppercase. + // Upper case makes sure keys will not interact with default prototypal + // methods: no object method is uppercase. + .toLowerCase() + .toUpperCase() + ) +} + +module.exports = normalizeIdentifier diff --git a/node_modules/micromark/lib/util/normalize-identifier.mjs b/node_modules/micromark/lib/util/normalize-identifier.mjs new file mode 100644 index 00000000..2a383ae1 --- /dev/null +++ b/node_modules/micromark/lib/util/normalize-identifier.mjs @@ -0,0 +1,21 @@ +export default normalizeIdentifier + +import values from '../character/values.mjs' + +function normalizeIdentifier(value) { + return ( + value + // Collapse Markdown whitespace. + .replace(/[\t\n\r ]+/g, values.space) + // Trim. + .replace(/^ | $/g, '') + // Some characters are considered “uppercase”, but if their lowercase + // counterpart is uppercased will result in a different uppercase + // character. + // Hence, to get that form, we perform both lower- and uppercase. + // Upper case makes sure keys will not interact with default prototypal + // methods: no object method is uppercase. + .toLowerCase() + .toUpperCase() + ) +} diff --git a/node_modules/micromark/lib/util/normalize-uri.js b/node_modules/micromark/lib/util/normalize-uri.js new file mode 100644 index 00000000..e4a07c1d --- /dev/null +++ b/node_modules/micromark/lib/util/normalize-uri.js @@ -0,0 +1,70 @@ +'use strict' + +var asciiAlphanumeric = require('../character/ascii-alphanumeric.js') +var codes = require('../character/codes.js') +var values = require('../character/values.js') +var fromCharCode = require('../constant/from-char-code.js') + +// Encode unsafe characters with percent-encoding, skipping already +// encoded sequences. +function normalizeUri(value) { + var index = -1 + var result = [] + var start = 0 + var skip = 0 + var code + var next + var replace + + while (++index < value.length) { + code = value.charCodeAt(index) + + // A correct percent encoded value. + if ( + code === codes.percentSign && + asciiAlphanumeric(value.charCodeAt(index + 1)) && + asciiAlphanumeric(value.charCodeAt(index + 2)) + ) { + skip = 2 + } + // ASCII. + else if (code < 128) { + if (!/[!#$&-;=?-Z_a-z~]/.test(fromCharCode(code))) { + replace = fromCharCode(code) + } + } + // Astral. + else if (code > 55295 && code < 57344) { + next = value.charCodeAt(index + 1) + + // A correct surrogate pair. + if (code < 56320 && next > 56319 && next < 57344) { + replace = fromCharCode(code, next) + skip = 1 + } + // Lone surrogate. + else { + replace = values.replacementCharacter + } + } + // Unicode. 
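  // Any other non-ASCII code in the Basic Multilingual Plane is
  // percent-encoded as UTF-8 by the block below. Rough behaviour sketch
  // (illustrative only):
  //
  //   normalizeUri('foo bar') // => 'foo%20bar'
  //   normalizeUri('a%20b')   // => 'a%20b' (already encoded, left alone)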
+ else { + replace = fromCharCode(code) + } + + if (replace) { + result.push(value.slice(start, index), encodeURIComponent(replace)) + start = index + skip + 1 + replace = undefined + } + + if (skip) { + index += skip + skip = 0 + } + } + + return result.join('') + value.slice(start) +} + +module.exports = normalizeUri diff --git a/node_modules/micromark/lib/util/normalize-uri.mjs b/node_modules/micromark/lib/util/normalize-uri.mjs new file mode 100644 index 00000000..31022433 --- /dev/null +++ b/node_modules/micromark/lib/util/normalize-uri.mjs @@ -0,0 +1,68 @@ +export default normalizeUri + +import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs' +import codes from '../character/codes.mjs' +import values from '../character/values.mjs' +import fromCharCode from '../constant/from-char-code.mjs' + +// Encode unsafe characters with percent-encoding, skipping already +// encoded sequences. +function normalizeUri(value) { + var index = -1 + var result = [] + var start = 0 + var skip = 0 + var code + var next + var replace + + while (++index < value.length) { + code = value.charCodeAt(index) + + // A correct percent encoded value. + if ( + code === codes.percentSign && + asciiAlphanumeric(value.charCodeAt(index + 1)) && + asciiAlphanumeric(value.charCodeAt(index + 2)) + ) { + skip = 2 + } + // ASCII. + else if (code < 128) { + if (!/[!#$&-;=?-Z_a-z~]/.test(fromCharCode(code))) { + replace = fromCharCode(code) + } + } + // Astral. + else if (code > 55295 && code < 57344) { + next = value.charCodeAt(index + 1) + + // A correct surrogate pair. + if (code < 56320 && next > 56319 && next < 57344) { + replace = fromCharCode(code, next) + skip = 1 + } + // Lone surrogate. + else { + replace = values.replacementCharacter + } + } + // Unicode. + else { + replace = fromCharCode(code) + } + + if (replace) { + result.push(value.slice(start, index), encodeURIComponent(replace)) + start = index + skip + 1 + replace = undefined + } + + if (skip) { + index += skip + skip = 0 + } + } + + return result.join('') + value.slice(start) +} diff --git a/node_modules/micromark/lib/util/prefix-size.js b/node_modules/micromark/lib/util/prefix-size.js new file mode 100644 index 00000000..a560e3e8 --- /dev/null +++ b/node_modules/micromark/lib/util/prefix-size.js @@ -0,0 +1,11 @@ +'use strict' + +var sizeChunks = require('./size-chunks.js') + +function prefixSize(events, type) { + var tail = events[events.length - 1] + if (!tail || tail[1].type !== type) return 0 + return sizeChunks(tail[2].sliceStream(tail[1])) +} + +module.exports = prefixSize diff --git a/node_modules/micromark/lib/util/prefix-size.mjs b/node_modules/micromark/lib/util/prefix-size.mjs new file mode 100644 index 00000000..473e18a2 --- /dev/null +++ b/node_modules/micromark/lib/util/prefix-size.mjs @@ -0,0 +1,9 @@ +export default prefixSize + +import sizeChunks from './size-chunks.mjs' + +function prefixSize(events, type) { + var tail = events[events.length - 1] + if (!tail || tail[1].type !== type) return 0 + return sizeChunks(tail[2].sliceStream(tail[1])) +} diff --git a/node_modules/micromark/lib/util/regex-check.js b/node_modules/micromark/lib/util/regex-check.js new file mode 100644 index 00000000..895772e6 --- /dev/null +++ b/node_modules/micromark/lib/util/regex-check.js @@ -0,0 +1,12 @@ +'use strict' + +var fromCharCode = require('../constant/from-char-code.js') + +function regexCheck(regex) { + return check + function check(code) { + return regex.test(fromCharCode(code)) + } +} + +module.exports = regexCheck diff --git 
a/node_modules/micromark/lib/util/regex-check.mjs b/node_modules/micromark/lib/util/regex-check.mjs new file mode 100644 index 00000000..f4bc0fd6 --- /dev/null +++ b/node_modules/micromark/lib/util/regex-check.mjs @@ -0,0 +1,10 @@ +export default regexCheck + +import fromCharCode from '../constant/from-char-code.mjs' + +function regexCheck(regex) { + return check + function check(code) { + return regex.test(fromCharCode(code)) + } +} diff --git a/node_modules/micromark/lib/util/resolve-all.js b/node_modules/micromark/lib/util/resolve-all.js new file mode 100644 index 00000000..3e8d76b4 --- /dev/null +++ b/node_modules/micromark/lib/util/resolve-all.js @@ -0,0 +1,20 @@ +'use strict' + +function resolveAll(constructs, events, context) { + var called = [] + var index = -1 + var resolve + + while (++index < constructs.length) { + resolve = constructs[index].resolveAll + + if (resolve && called.indexOf(resolve) < 0) { + events = resolve(events, context) + called.push(resolve) + } + } + + return events +} + +module.exports = resolveAll diff --git a/node_modules/micromark/lib/util/resolve-all.mjs b/node_modules/micromark/lib/util/resolve-all.mjs new file mode 100644 index 00000000..1a70eebe --- /dev/null +++ b/node_modules/micromark/lib/util/resolve-all.mjs @@ -0,0 +1,18 @@ +export default resolveAll + +function resolveAll(constructs, events, context) { + var called = [] + var index = -1 + var resolve + + while (++index < constructs.length) { + resolve = constructs[index].resolveAll + + if (resolve && called.indexOf(resolve) < 0) { + events = resolve(events, context) + called.push(resolve) + } + } + + return events +} diff --git a/node_modules/micromark/lib/util/safe-from-int.js b/node_modules/micromark/lib/util/safe-from-int.js new file mode 100644 index 00000000..e5e64289 --- /dev/null +++ b/node_modules/micromark/lib/util/safe-from-int.js @@ -0,0 +1,32 @@ +'use strict' + +var codes = require('../character/codes.js') +var values = require('../character/values.js') +var fromCharCode = require('../constant/from-char-code.js') + +function safeFromInt(value, base) { + var code = parseInt(value, base) + + if ( + // C0 except for HT, LF, FF, CR, space + code < codes.ht || + code === codes.vt || + (code > codes.cr && code < codes.space) || + // Control character (DEL) of the basic block and C1 controls. + (code > codes.tilde && code < 160) || + // Lone high surrogates and low surrogates. + (code > 55295 && code < 57344) || + // Noncharacters. + (code > 64975 && code < 65008) || + (code & 65535) === 65535 || + (code & 65535) === 65534 || + // Out of range + code > 1114111 + ) { + return values.replacementCharacter + } + + return fromCharCode(code) +} + +module.exports = safeFromInt diff --git a/node_modules/micromark/lib/util/safe-from-int.mjs b/node_modules/micromark/lib/util/safe-from-int.mjs new file mode 100644 index 00000000..e218d471 --- /dev/null +++ b/node_modules/micromark/lib/util/safe-from-int.mjs @@ -0,0 +1,30 @@ +export default safeFromInt + +import codes from '../character/codes.mjs' +import values from '../character/values.mjs' +import fromCharCode from '../constant/from-char-code.mjs' + +function safeFromInt(value, base) { + var code = parseInt(value, base) + + if ( + // C0 except for HT, LF, FF, CR, space + code < codes.ht || + code === codes.vt || + (code > codes.cr && code < codes.space) || + // Control character (DEL) of the basic block and C1 controls. + (code > codes.tilde && code < 160) || + // Lone high surrogates and low surrogates. 
+ (code > 55295 && code < 57344) || + // Noncharacters. + (code > 64975 && code < 65008) || + (code & 65535) === 65535 || + (code & 65535) === 65534 || + // Out of range + code > 1114111 + ) { + return values.replacementCharacter + } + + return fromCharCode(code) +} diff --git a/node_modules/micromark/lib/util/serialize-chunks.js b/node_modules/micromark/lib/util/serialize-chunks.js new file mode 100644 index 00000000..4d01d915 --- /dev/null +++ b/node_modules/micromark/lib/util/serialize-chunks.js @@ -0,0 +1,54 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var values = require('../character/values.js') +var fromCharCode = require('../constant/from-char-code.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +function serializeChunks(chunks) { + var index = -1 + var result = [] + var chunk + var value + var atTab + + while (++index < chunks.length) { + chunk = chunks[index] + + if (typeof chunk === 'string') { + value = chunk + } else if (chunk === codes.carriageReturn) { + value = values.cr + } else if (chunk === codes.lineFeed) { + value = values.lf + } else if (chunk === codes.carriageReturnLineFeed) { + value = values.cr + values.lf + } else if (chunk === codes.horizontalTab) { + value = values.ht + } else if (chunk === codes.virtualSpace) { + if (atTab) continue + value = values.space + } else { + assert__default['default'].equal( + typeof chunk, + 'number', + 'expected number' + ) + // Currently only replacement character. + value = fromCharCode(chunk) + } + + atTab = chunk === codes.horizontalTab + result.push(value) + } + + return result.join('') +} + +module.exports = serializeChunks diff --git a/node_modules/micromark/lib/util/serialize-chunks.mjs b/node_modules/micromark/lib/util/serialize-chunks.mjs new file mode 100644 index 00000000..42ab3a9a --- /dev/null +++ b/node_modules/micromark/lib/util/serialize-chunks.mjs @@ -0,0 +1,42 @@ +export default serializeChunks + +import assert from 'assert' +import codes from '../character/codes.mjs' +import values from '../character/values.mjs' +import fromCharCode from '../constant/from-char-code.mjs' + +function serializeChunks(chunks) { + var index = -1 + var result = [] + var chunk + var value + var atTab + + while (++index < chunks.length) { + chunk = chunks[index] + + if (typeof chunk === 'string') { + value = chunk + } else if (chunk === codes.carriageReturn) { + value = values.cr + } else if (chunk === codes.lineFeed) { + value = values.lf + } else if (chunk === codes.carriageReturnLineFeed) { + value = values.cr + values.lf + } else if (chunk === codes.horizontalTab) { + value = values.ht + } else if (chunk === codes.virtualSpace) { + if (atTab) continue + value = values.space + } else { + assert.equal(typeof chunk, 'number', 'expected number') + // Currently only replacement character. 
+ value = fromCharCode(chunk) + } + + atTab = chunk === codes.horizontalTab + result.push(value) + } + + return result.join('') +} diff --git a/node_modules/micromark/lib/util/shallow.js b/node_modules/micromark/lib/util/shallow.js new file mode 100644 index 00000000..f980ab99 --- /dev/null +++ b/node_modules/micromark/lib/util/shallow.js @@ -0,0 +1,9 @@ +'use strict' + +var assign = require('../constant/assign.js') + +function shallow(object) { + return assign({}, object) +} + +module.exports = shallow diff --git a/node_modules/micromark/lib/util/shallow.mjs b/node_modules/micromark/lib/util/shallow.mjs new file mode 100644 index 00000000..e121ccaa --- /dev/null +++ b/node_modules/micromark/lib/util/shallow.mjs @@ -0,0 +1,7 @@ +export default shallow + +import assign from '../constant/assign.mjs' + +function shallow(object) { + return assign({}, object) +} diff --git a/node_modules/micromark/lib/util/size-chunks.js b/node_modules/micromark/lib/util/size-chunks.js new file mode 100644 index 00000000..6b2f5ec7 --- /dev/null +++ b/node_modules/micromark/lib/util/size-chunks.js @@ -0,0 +1,16 @@ +'use strict' + +// Measure the number of character codes in chunks. +// Counts tabs based on their expanded size, and CR+LF as one character. +function sizeChunks(chunks) { + var index = -1 + var size = 0 + + while (++index < chunks.length) { + size += typeof chunks[index] === 'string' ? chunks[index].length : 1 + } + + return size +} + +module.exports = sizeChunks diff --git a/node_modules/micromark/lib/util/size-chunks.mjs b/node_modules/micromark/lib/util/size-chunks.mjs new file mode 100644 index 00000000..d3305bbb --- /dev/null +++ b/node_modules/micromark/lib/util/size-chunks.mjs @@ -0,0 +1,14 @@ +export default sizeChunks + +// Measure the number of character codes in chunks. +// Counts tabs based on their expanded size, and CR+LF as one character. +function sizeChunks(chunks) { + var index = -1 + var size = 0 + + while (++index < chunks.length) { + size += typeof chunks[index] === 'string' ? chunks[index].length : 1 + } + + return size +} diff --git a/node_modules/micromark/lib/util/slice-chunks.js b/node_modules/micromark/lib/util/slice-chunks.js new file mode 100644 index 00000000..b52c8dcc --- /dev/null +++ b/node_modules/micromark/lib/util/slice-chunks.js @@ -0,0 +1,43 @@ +'use strict' + +var assert = require('assert') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? 
e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +function sliceChunks(chunks, token) { + var startIndex = token.start._index + var startBufferIndex = token.start._bufferIndex + var endIndex = token.end._index + var endBufferIndex = token.end._bufferIndex + var view + + if (startIndex === endIndex) { + assert__default['default']( + endBufferIndex > -1, + 'expected non-negative end buffer index' + ) + assert__default['default']( + startBufferIndex > -1, + 'expected non-negative start buffer index' + ) + view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)] + } else { + view = chunks.slice(startIndex, endIndex) + + if (startBufferIndex > -1) { + view[0] = view[0].slice(startBufferIndex) + } + + if (endBufferIndex > 0) { + view.push(chunks[endIndex].slice(0, endBufferIndex)) + } + } + + return view +} + +module.exports = sliceChunks diff --git a/node_modules/micromark/lib/util/slice-chunks.mjs b/node_modules/micromark/lib/util/slice-chunks.mjs new file mode 100644 index 00000000..987bbe1d --- /dev/null +++ b/node_modules/micromark/lib/util/slice-chunks.mjs @@ -0,0 +1,29 @@ +export default sliceChunks + +import assert from 'assert' + +function sliceChunks(chunks, token) { + var startIndex = token.start._index + var startBufferIndex = token.start._bufferIndex + var endIndex = token.end._index + var endBufferIndex = token.end._bufferIndex + var view + + if (startIndex === endIndex) { + assert(endBufferIndex > -1, 'expected non-negative end buffer index') + assert(startBufferIndex > -1, 'expected non-negative start buffer index') + view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)] + } else { + view = chunks.slice(startIndex, endIndex) + + if (startBufferIndex > -1) { + view[0] = view[0].slice(startBufferIndex) + } + + if (endBufferIndex > 0) { + view.push(chunks[endIndex].slice(0, endBufferIndex)) + } + } + + return view +} diff --git a/node_modules/micromark/lib/util/subtokenize.js b/node_modules/micromark/lib/util/subtokenize.js new file mode 100644 index 00000000..9e7648c0 --- /dev/null +++ b/node_modules/micromark/lib/util/subtokenize.js @@ -0,0 +1,219 @@ +'use strict' + +var assert = require('assert') +var codes = require('../character/codes.js') +var assign = require('../constant/assign.js') +var types = require('../constant/types.js') +var chunkedSplice = require('./chunked-splice.js') +var shallow = require('./shallow.js') + +function _interopDefaultLegacy(e) { + return e && typeof e === 'object' && 'default' in e ? e : {default: e} +} + +var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert) + +function subtokenize(events) { + var jumps = {} + var index = -1 + var event + var lineIndex + var otherIndex + var otherEvent + var parameters + var subevents + var more + + while (++index < events.length) { + while (index in jumps) { + index = jumps[index] + } + + event = events[index] + + // Add a hook for the GFM tasklist extension, which needs to know if text + // is in the first content of a list item. 
+ if ( + index && + event[1].type === types.chunkFlow && + events[index - 1][1].type === types.listItemPrefix + ) { + subevents = event[1]._tokenizer.events + otherIndex = 0 + + if ( + otherIndex < subevents.length && + subevents[otherIndex][1].type === types.lineEndingBlank + ) { + otherIndex += 2 + } + + if ( + otherIndex < subevents.length && + subevents[otherIndex][1].type === types.content + ) { + while (++otherIndex < subevents.length) { + if (subevents[otherIndex][1].type === types.content) { + break + } + + if (subevents[otherIndex][1].type === types.chunkText) { + subevents[otherIndex][1].isInFirstContentOfListItem = true + otherIndex++ + } + } + } + } + + // Enter. + if (event[0] === 'enter') { + if (event[1].contentType) { + assign(jumps, subcontent(events, index)) + index = jumps[index] + more = true + } + } + // Exit. + else if (event[1]._container || event[1]._movePreviousLineEndings) { + otherIndex = index + lineIndex = undefined + + while (otherIndex--) { + otherEvent = events[otherIndex] + + if ( + otherEvent[1].type === types.lineEnding || + otherEvent[1].type === types.lineEndingBlank + ) { + if (otherEvent[0] === 'enter') { + if (lineIndex) { + events[lineIndex][1].type = types.lineEndingBlank + } + + otherEvent[1].type = types.lineEnding + lineIndex = otherIndex + } + } else { + break + } + } + + if (lineIndex) { + // Fix position. + event[1].end = shallow(events[lineIndex][1].start) + + // Switch container exit w/ line endings. + parameters = events.slice(lineIndex, index) + parameters.unshift(event) + chunkedSplice(events, lineIndex, index - lineIndex + 1, parameters) + } + } + } + + return !more +} + +function subcontent(events, eventIndex) { + var token = events[eventIndex][1] + var context = events[eventIndex][2] + var startPosition = eventIndex - 1 + var startPositions = [] + var tokenizer = + token._tokenizer || context.parser[token.contentType](token.start) + var childEvents = tokenizer.events + var jumps = [] + var gaps = {} + var stream + var previous + var index + var entered + var end + var adjust + + // Loop forward through the linked tokens to pass them in order to the + // subtokenizer. + while (token) { + // Find the position of the event for this token. + while (events[++startPosition][1] !== token) { + // Empty. + } + + startPositions.push(startPosition) + + if (!token._tokenizer) { + stream = context.sliceStream(token) + + if (!token.next) { + stream.push(codes.eof) + } + + if (previous) { + tokenizer.defineSkip(token.start) + } + + if (token.isInFirstContentOfListItem) { + tokenizer._gfmTasklistFirstContentOfListItem = true + } + + tokenizer.write(stream) + + if (token.isInFirstContentOfListItem) { + tokenizer._gfmTasklistFirstContentOfListItem = undefined + } + } + + // Unravel the next token. + previous = token + token = token.next + } + + // Now, loop back through all events (and linked tokens), to figure out which + // parts belong where. + token = previous + index = childEvents.length + + while (index--) { + // Make sure we’ve at least seen something (final eol is part of the last + // token). + if (childEvents[index][0] === 'enter') { + entered = true + } else if ( + // Find a void token that includes a break. + entered && + childEvents[index][1].type === childEvents[index - 1][1].type && + childEvents[index][1].start.line !== childEvents[index][1].end.line + ) { + add(childEvents.slice(index + 1, end)) + assert__default['default'](token.previous, 'expected a previous token') + // Help GC. 
+ token._tokenizer = token.next = undefined + token = token.previous + end = index + 1 + } + } + + assert__default['default'](!token.previous, 'expected no previous token') + // Help GC. + tokenizer.events = token._tokenizer = token.next = undefined + + // Do head: + add(childEvents.slice(0, end)) + + index = -1 + adjust = 0 + + while (++index < jumps.length) { + gaps[adjust + jumps[index][0]] = adjust + jumps[index][1] + adjust += jumps[index][1] - jumps[index][0] - 1 + } + + return gaps + + function add(slice) { + var start = startPositions.pop() + jumps.unshift([start, start + slice.length - 1]) + chunkedSplice(events, start, 2, slice) + } +} + +module.exports = subtokenize diff --git a/node_modules/micromark/lib/util/subtokenize.mjs b/node_modules/micromark/lib/util/subtokenize.mjs new file mode 100644 index 00000000..7844130d --- /dev/null +++ b/node_modules/micromark/lib/util/subtokenize.mjs @@ -0,0 +1,211 @@ +export default subtokenize + +import assert from 'assert' +import codes from '../character/codes.mjs' +import assign from '../constant/assign.mjs' +import types from '../constant/types.mjs' +import chunkedSplice from './chunked-splice.mjs' +import shallow from './shallow.mjs' + +function subtokenize(events) { + var jumps = {} + var index = -1 + var event + var lineIndex + var otherIndex + var otherEvent + var parameters + var subevents + var more + + while (++index < events.length) { + while (index in jumps) { + index = jumps[index] + } + + event = events[index] + + // Add a hook for the GFM tasklist extension, which needs to know if text + // is in the first content of a list item. + if ( + index && + event[1].type === types.chunkFlow && + events[index - 1][1].type === types.listItemPrefix + ) { + subevents = event[1]._tokenizer.events + otherIndex = 0 + + if ( + otherIndex < subevents.length && + subevents[otherIndex][1].type === types.lineEndingBlank + ) { + otherIndex += 2 + } + + if ( + otherIndex < subevents.length && + subevents[otherIndex][1].type === types.content + ) { + while (++otherIndex < subevents.length) { + if (subevents[otherIndex][1].type === types.content) { + break + } + + if (subevents[otherIndex][1].type === types.chunkText) { + subevents[otherIndex][1].isInFirstContentOfListItem = true + otherIndex++ + } + } + } + } + + // Enter. + if (event[0] === 'enter') { + if (event[1].contentType) { + assign(jumps, subcontent(events, index)) + index = jumps[index] + more = true + } + } + // Exit. + else if (event[1]._container || event[1]._movePreviousLineEndings) { + otherIndex = index + lineIndex = undefined + + while (otherIndex--) { + otherEvent = events[otherIndex] + + if ( + otherEvent[1].type === types.lineEnding || + otherEvent[1].type === types.lineEndingBlank + ) { + if (otherEvent[0] === 'enter') { + if (lineIndex) { + events[lineIndex][1].type = types.lineEndingBlank + } + + otherEvent[1].type = types.lineEnding + lineIndex = otherIndex + } + } else { + break + } + } + + if (lineIndex) { + // Fix position. + event[1].end = shallow(events[lineIndex][1].start) + + // Switch container exit w/ line endings. 
+ parameters = events.slice(lineIndex, index) + parameters.unshift(event) + chunkedSplice(events, lineIndex, index - lineIndex + 1, parameters) + } + } + } + + return !more +} + +function subcontent(events, eventIndex) { + var token = events[eventIndex][1] + var context = events[eventIndex][2] + var startPosition = eventIndex - 1 + var startPositions = [] + var tokenizer = + token._tokenizer || context.parser[token.contentType](token.start) + var childEvents = tokenizer.events + var jumps = [] + var gaps = {} + var stream + var previous + var index + var entered + var end + var adjust + + // Loop forward through the linked tokens to pass them in order to the + // subtokenizer. + while (token) { + // Find the position of the event for this token. + while (events[++startPosition][1] !== token) { + // Empty. + } + + startPositions.push(startPosition) + + if (!token._tokenizer) { + stream = context.sliceStream(token) + + if (!token.next) { + stream.push(codes.eof) + } + + if (previous) { + tokenizer.defineSkip(token.start) + } + + if (token.isInFirstContentOfListItem) { + tokenizer._gfmTasklistFirstContentOfListItem = true + } + + tokenizer.write(stream) + + if (token.isInFirstContentOfListItem) { + tokenizer._gfmTasklistFirstContentOfListItem = undefined + } + } + + // Unravel the next token. + previous = token + token = token.next + } + + // Now, loop back through all events (and linked tokens), to figure out which + // parts belong where. + token = previous + index = childEvents.length + + while (index--) { + // Make sure we’ve at least seen something (final eol is part of the last + // token). + if (childEvents[index][0] === 'enter') { + entered = true + } else if ( + // Find a void token that includes a break. + entered && + childEvents[index][1].type === childEvents[index - 1][1].type && + childEvents[index][1].start.line !== childEvents[index][1].end.line + ) { + add(childEvents.slice(index + 1, end)) + assert(token.previous, 'expected a previous token') + // Help GC. + token._tokenizer = token.next = undefined + token = token.previous + end = index + 1 + } + } + + assert(!token.previous, 'expected no previous token') + // Help GC. + tokenizer.events = token._tokenizer = token.next = undefined + + // Do head: + add(childEvents.slice(0, end)) + + index = -1 + adjust = 0 + + while (++index < jumps.length) { + gaps[adjust + jumps[index][0]] = adjust + jumps[index][1] + adjust += jumps[index][1] - jumps[index][0] - 1 + } + + return gaps + + function add(slice) { + var start = startPositions.pop() + jumps.unshift([start, start + slice.length - 1]) + chunkedSplice(events, start, 2, slice) + } +} diff --git a/node_modules/micromark/license b/node_modules/micromark/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/micromark/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer <tituswormer@gmail.com> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/micromark/package.json b/node_modules/micromark/package.json new file mode 100644 index 00000000..b1b3941c --- /dev/null +++ b/node_modules/micromark/package.json @@ -0,0 +1,208 @@ +{ + "name": "micromark", + "version": "2.11.4", + "description": "small commonmark compliant markdown parser with positional info and concrete tokens", + "license": "MIT", + "keywords": [ + "commonmark", + "compiler", + "gfm", + "html", + "lexer", + "markdown", + "markup", + "md", + "unified", + "parse", + "parser", + "plugin", + "process", + "remark", + "render", + "renderer", + "token", + "tokenizer" + ], + "repository": "micromark/micromark", + "bugs": "https://github.com/micromark/micromark/issues", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", + "contributors": [ + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", + "Bogdan Chadkin <trysound@yandex.ru>", + "Christian Murphy <christian.murphy.42@gmail.com>", + "Marouane Fazouane <fazouanem3@gmail.com>", + "John Otander <johnotander@gmail.com> (https://johno.com)", + "Stephan Schneider <stephanschndr@gmail.com>", + "Victor Felder <victor@draft.li> (https://draft.li)", + "Mudit Ameta <zeusdeux@gmail.com> (https://mudit.xyz)", + "Merlijn Vos <merlijn@soverin.net>" + ], + "files": [ + "dist/", + "lib/", + "buffer.d.ts", + "buffer.js", + "buffer.mjs", + "index.js", + "index.mjs", + "index.d.ts", + "stream.d.ts", + "stream.js", + "stream.mjs" + ], + "main": "./index.js", + "types": "index.d.ts", + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + }, + "devDependencies": { + "@babel/core": "^7.0.0", + "@rollup/plugin-babel": "^5.0.0", + "@rollup/plugin-commonjs": "^17.0.0", + "@rollup/plugin-node-resolve": "^11.0.0", + "@types/events": "^3.0.0", + "@unicode/unicode-13.0.0": "^1.0.0", + "babel-plugin-inline-constants": "^1.0.0", + "babel-plugin-unassert": "^3.0.0", + "babel-plugin-undebug": "^1.0.0", + "c8": "^7.0.0", + "character-entities": "^1.0.0", + "commonmark.json": "^0.29.0", + "concat-stream": "^2.0.0", + "cross-env": "^7.0.0", + "dtslint": "^4.0.0", + "eslint-plugin-es": "^4.0.0", + "eslint-plugin-security": "^1.0.0", + "esm": "^3.0.0", + "glob": "^7.0.0", + "gzip-size-cli": "^4.0.0", + "jsfuzz": "1.0.14", + "ms": "^2.0.0", + "patch-package": "^6.0.0", + "prettier": "^2.0.0", + "regenerate": "^1.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "resolve-from": "^5.0.0", + "rollup": "^2.0.0", + "rollup-plugin-terser": "^7.0.0", + "tape": "^5.0.0", + "xo": "^0.37.0" + }, + "scripts": { + "generate-lib-types": "node --experimental-modules script/generate-constant-typings.mjs", + "generate-lib-expressions": "node --experimental-modules script/generate-expressions.mjs", + "generate-lib-cjs": "rollup -c --silent", + "generate-lib": "npm run generate-lib-types && npm run generate-lib-expressions && npm run 
generate-lib-cjs", + "generate-dist-types": "node --experimental-modules script/copy-dict.mjs", + "generate-dist-js": "cross-env BUILD=dist rollup -c --silent", + "generate-dist": "npm run generate-dist-types && npm run generate-dist-js", + "generate-size": "cross-env BUILD=size rollup -c --silent && gzip-size micromark.min.js && gzip-size --raw micromark.min.js", + "generate": "npm run generate-lib && npm run generate-dist && npm run generate-size", + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "pretest-fuzz": "patch-package --patch-dir script/patches && node script/generate-fixtures.mjs", + "test-fuzz": "cross-env NODE_OPTIONS=\"-r esm\" timeout 15m jsfuzz test/fuzz.js test/fixtures", + "test-api": "node --experimental-modules test/index.mjs", + "test-coverage": "c8 --check-coverage --lines 100 --functions 100 --branches 100 --reporter lcov node --experimental-modules test/index.mjs", + "test-types": "dtslint .", + "test": "npm run generate && npm run format && npm run test-coverage && npm run test-types" + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "esnext": false, + "extensions": [ + "mjs" + ], + "prettier": true, + "envs": [ + "shared-node-browser" + ], + "rules": { + "import/extensions": [ + "error", + "always" + ] + }, + "overrides": [ + { + "files": [ + "lib/**/*.{js,mjs}" + ], + "plugin": [ + "es" + ], + "extends": [ + "plugin:es/no-new-in-es2015", + "plugin:security/recommended" + ], + "rules": { + "complexity": "off", + "es/no-array-from": "off", + "es/no-object-assign": "off", + "es/no-modules": "off", + "import/no-mutable-exports": "off", + "import/no-anonymous-default-export": "off", + "guard-for-in": "off", + "max-depth": "off", + "no-multi-assign": "off", + "no-unmodified-loop-condition": "off", + "security/detect-object-injection": "off", + "unicorn/explicit-length-check": "off", + "unicorn/prefer-includes": "off", + "unicorn/prefer-number-properties": "off" + } + }, + { + "files": [ + "**/*.d.ts" + ], + "rules": { + "import/extensions": [ + "error", + "never" + ] + } + }, + { + "files": [ + "test/**/*.{js,mjs}" + ], + "rules": { + "import/no-unassigned-import": "off" + } + } + ], + "ignores": [ + "dist/", + "lib/**/*.js", + "micromark.test.ts" + ] + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm", + [ + "lint-no-html", + false + ] + ] + } +} diff --git a/node_modules/micromark/readme.md b/node_modules/micromark/readme.md new file mode 100644 index 00000000..db7931df --- /dev/null +++ b/node_modules/micromark/readme.md @@ -0,0 +1,737 @@ +<h1 align="center"> + <img src="https://raw.githubusercontent.com/micromark/micromark/9c34547/logo.svg?sanitize=true" alt="micromark" width="400" /> +</h1> + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][bundle-size-badge]][bundle-size] +[![Sponsors][sponsors-badge]][opencollective] +[![Backers][backers-badge]][opencollective] +[![Chat][chat-badge]][chat] + +The smallest CommonMark compliant markdown parser with positional info and +concrete tokens. 
+ +* [x] **[compliant][commonmark]** (100% to CommonMark) +* [x] **[extensions][]** ([GFM][], [directives][], [footnotes][], + [frontmatter][], [math][], [MDX.js][mdxjs]) +* [x] **[safe][security]** (by default) +* [x] **[small][size]** (smallest CM parser that exists) +* [x] **[robust][test]** (1800+ tests, 100% coverage, fuzz testing) + +## Intro + +micromark is a long awaited markdown parser. +It uses a [state machine][cmsm] to parse the entirety of markdown into concrete +tokens. +It’s the smallest 100% [CommonMark][] compliant markdown parser in JavaScript. +It was made to replace the internals of [`remark-parse`][remark-parse], the most +[popular][] markdown parser. +Its API compiles to HTML, but its parts are made to be used separately, so as to +generate syntax trees ([`mdast-util-from-markdown`][from-markdown]) or compile +to other output formats. +It’s in open beta: up next are [CMSM][] and CSTs. + +* for updates, see [Twitter][] +* for more about us, see [`unifiedjs.com`][site] +* for questions, see [Discussions][chat] +* to help, see [contribute][] or [sponsor][] below + +## Contents + +* [Install](#install) +* [Use](#use) +* [API](#api) + * [`micromark(doc[, encoding][, options])`](#micromarkdoc-encoding-options) + * [`micromarkStream(options?)`](#micromarkstreamoptions) +* [Extensions](#extensions) + * [`SyntaxExtension`](#syntaxextension) + * [`HtmlExtension`](#htmlextension) + * [List of extensions](#list-of-extensions) +* [Syntax tree](#syntax-tree) +* [CommonMark](#commonmark) +* [Grammar](#grammar) +* [Test](#test) +* [Size & debug](#size--debug) +* [Comparison](#comparison) +* [Version](#version) +* [Security](#security) +* [Contribute](#contribute) +* [Sponsor](#sponsor) +* [Origin story](#origin-story) +* [License](#license) + +## Install + +[npm][]: + +```sh +npm install micromark +``` + +## Use + +Typical use (buffering): + +```js +var micromark = require('micromark') + +console.log(micromark('## Hello, *world*!')) +``` + +Yields: + +```html +<h2>Hello, <em>world</em>!</h2> +``` + +The same can be done with ESM (in Node 10+, browsers that support it, or with a +bundler), in an `example.mjs` file, like so: + +```js +import micromark from 'micromark' + +console.log(micromark('## Hello, *world*!')) +``` + +You can pass extensions (in this case [`micromark-extension-gfm`][gfm]): + +```js +var micromark = require('micromark') +var gfmSyntax = require('micromark-extension-gfm') +var gfmHtml = require('micromark-extension-gfm/html') + +var doc = '* [x] contact@example.com ~~strikethrough~~' + +var result = micromark(doc, { + extensions: [gfmSyntax()], + htmlExtensions: [gfmHtml] +}) + +console.log(result) +``` + +Yields: + +```html +<ul> +<li><input checked="" disabled="" type="checkbox"> <a href="mailto:contact@example.com">contact@example.com</a> <del>strikethrough</del></li> +</ul> +``` + +Streaming interface: + +```js +var fs = require('fs') +var micromarkStream = require('micromark/stream') + +fs.createReadStream('example.md') + .on('error', handleError) + .pipe(micromarkStream()) + .pipe(process.stdout) + +function handleError(err) { + // Handle your error here! + throw err +} +``` + +## API + +This section documents the API. +The parts can be used separately, but this isn’t documented yet. + +### `micromark(doc[, encoding][, options])` + +Compile markdown to HTML. 
+ +##### Parameters + +###### `doc` + +Markdown to parse (`string` or `Buffer`) + +###### `encoding` + +[Character encoding][encoding] to understand `doc` as when it’s a +[`Buffer`][buffer] (`string`, default: `'utf8'`). + +###### `options.defaultLineEnding` + +Value to use for line endings not in `doc` (`string`, default: first line +ending or `'\n'`). + +Generally, micromark copies line endings (`'\r'`, `'\n'`, `'\r\n'`) in the +markdown document over to the compiled HTML. +In some cases, such as `> a`, CommonMark requires that extra line endings are +added: `<blockquote>\n<p>a</p>\n</blockquote>`. + +###### `options.allowDangerousHtml` + +Whether to allow embedded HTML (`boolean`, default: `false`). + +###### `options.allowDangerousProtocol` + +Whether to allow potentially dangerous protocols in links and images (`boolean`, +default: `false`). +URLs relative to the current protocol are always allowed (such as, `image.jpg`). +For links, the allowed protocols are `http`, `https`, `irc`, `ircs`, `mailto`, +and `xmpp`. +For images, the allowed protocols are `http` and `https`. + +###### `options.extensions` + +Array of syntax extensions ([`Array.<SyntaxExtension>`][syntax-extension], +default: `[]`). + +###### `options.htmlExtensions` + +Array of HTML extensions ([`Array.<HtmlExtension>`][html-extension], default: +`[]`). + +##### Returns + +`string` — Compiled HTML. + +### `micromarkStream(options?)` + +Streaming interface of micromark. +Compiles markdown to HTML. +`options` are the same as the buffering API above. +Available at `require('micromark/stream')`. +Note that some of the work to parse markdown can be done streaming, but in the +end buffering is required. + +micromark does not handle errors for you, so you must handle errors on whatever +streams you pipe into it. +As markdown does not know errors, `micromark` itself does not emit errors. + +## Extensions + +There are two types of extensions for micromark: +[`SyntaxExtension`][syntax-extension] and [`HtmlExtension`][html-extension]. +They can be passed in [`extensions`][option-extensions] or +[`htmlExtensions`][option-htmlextensions], respectively. + +### `SyntaxExtension` + +A syntax extension is an object whose fields are the names of hooks, referring +to where constructs “hook” into. +`content` (a block of, well, content: definitions and paragraphs), `document` +(containers such as block quotes and lists), `flow` (block constructs such as +ATX and setext headings, HTML, indented and fenced code, thematic breaks), +`string` (things that work in a few places such as destinations, fenced code +info, etc: character escapes and -references), or `text` (rich inline text: +autolinks, character escapes and -references, code, hard breaks, HTML, images, +links, emphasis, strong). + +The fields at such objects are character codes, mapping to constructs as values. +The built in [constructs][] are an extension. +See it and the [existing extensions][extensions] for inspiration. + +### `HtmlExtension` + +An HTML extension is an object whose fields are either `enter` or `exit` +(reflecting whether a token is entered or exited). +The values at such objects are names of tokens mapping to handlers. +See the [existing extensions][extensions] for inspiration. 
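+
+For a sense of the shape, here is a minimal sketch of an HTML extension.
+The token name `exampleHighlight` is made up for illustration and would have
+to be produced by a matching syntax extension; the `this.tag` calls mirror
+the pattern used by the existing HTML extensions, so treat this as a sketch
+rather than a complete recipe:
+
+```js
+// Hypothetical: turn a made-up `exampleHighlight` token into `<mark>…</mark>`.
+var htmlExampleHighlight = {
+  enter: {
+    exampleHighlight: function () {
+      this.tag('<mark>')
+    }
+  },
+  exit: {
+    exampleHighlight: function () {
+      this.tag('</mark>')
+    }
+  }
+}
+```
+
+Such an object would be passed in [`htmlExtensions`][option-htmlextensions],
+alongside the syntax extension that actually tokenizes the construct.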
+ +### List of extensions + +* [`micromark/micromark-extension-directive`][directives] + — support directives (generic extensions) +* [`micromark/micromark-extension-footnote`][footnotes] + — support footnotes +* [`micromark/micromark-extension-frontmatter`][frontmatter] + — support frontmatter (YAML, TOML, etc) +* [`micromark/micromark-extension-gfm`][gfm] + — support GFM (GitHub Flavored Markdown) +* [`micromark/micromark-extension-gfm-autolink-literal`](https://github.com/micromark/micromark-extension-gfm-autolink-literal) + — support GFM autolink literals +* [`micromark/micromark-extension-gfm-strikethrough`](https://github.com/micromark/micromark-extension-gfm-strikethrough) + — support GFM strikethrough +* [`micromark/micromark-extension-gfm-table`](https://github.com/micromark/micromark-extension-gfm-table) + — support GFM tables +* [`micromark/micromark-extension-gfm-tagfilter`](https://github.com/micromark/micromark-extension-gfm-tagfilter) + — support GFM tagfilter +* [`micromark/micromark-extension-gfm-task-list-item`](https://github.com/micromark/micromark-extension-gfm-task-list-item) + — support GFM tasklists +* [`micromark/micromark-extension-math`][math] + — support math +* [`micromark/micromark-extension-mdx`](https://github.com/micromark/micromark-extension-mdx) + — support MDX +* [`micromark/micromark-extension-mdxjs`][mdxjs] + — support MDX.js +* [`micromark/micromark-extension-mdx-expression`](https://github.com/micromark/micromark-extension-mdx-expression) + — support MDX (or MDX.js) expressions +* [`micromark/micromark-extension-mdx-jsx`](https://github.com/micromark/micromark-extension-mdx-jsx) + — support MDX (or MDX.js) JSX +* [`micromark/micromark-extension-mdx-md`](https://github.com/micromark/micromark-extension-mdx-md) + — support misc MDX changes +* [`micromark/micromark-extension-mdxjs-esm`](https://github.com/micromark/micromark-extension-mdxjs-esm) + — support MDX.js import/exports + +## Syntax tree + +A higher level project, [`mdast-util-from-markdown`][from-markdown], can give +you an AST. + +```js +var fromMarkdown = require('mdast-util-from-markdown') + +var result = fromMarkdown('## Hello, *world*!') + +console.log(result.children[0]) +``` + +Yields: + +```js +{ + type: 'heading', + depth: 2, + children: [ + {type: 'text', value: 'Hello, ', position: [Object]}, + {type: 'emphasis', children: [Array], position: [Object]}, + {type: 'text', value: '!', position: [Object]} + ], + position: { + start: {line: 1, column: 1, offset: 0}, + end: {line: 1, column: 19, offset: 18} + } +} +``` + +Another level up is [**remark**][remark], which provides a nice interface and +hundreds of plugins. + +## CommonMark + +The first definition of “Markdown” gave several examples of how it worked, +showing input Markdown and output HTML, and came with a reference implementation +(`Markdown.pl`). +When new implementations followed, they mostly followed the first definition, +but deviated from the first implementation, and added extensions, thus making +the format a family of formats. + +Some years later, an attempt was made to standardize the differences between +implementations, by specifying how several edge cases should be handled, through +more input and output examples. +This is known as [CommonMark][commonmark-spec], and many implementations now +work towards some degree of CommonMark compliancy. 
+Still, CommonMark describes what the output in HTML should be given some +input, which leaves many edge cases up for debate, and does not answer what +should happen for other output formats. + +micromark passes all tests from CommonMark and has many more tests to match the +CommonMark reference parsers. +Finally, it comes with [CMSM][], which describes how to parse markup, instead +of documenting input and output examples. + +## Grammar + +The syntax of markdown can be described in Backus–Naur form (BNF) as: + +```bnf +markdown = .* +``` + +No, that’s not a [typo](http://trevorjim.com/a-specification-for-markdown/): +markdown has no syntax errors; anything thrown at it renders *something*. + +## Test + +micromark is tested with the \~650 CommonMark tests and more than 1.2k extra +tests confirmed with CM reference parsers. +These tests reach all branches in the code, thus this project has 100% coverage. +Finally, we use fuzz testing to ensure micromark is stable, reliable, and +secure. + +To build, format, and test the codebase, use `$ npm test` after clone and +install. +The `$ npm run test-api` and `$ npm run test-coverage` scripts check the unit +tests and their coverage, respectively. +The `$ npm run test-types` script checks TypeScript definitions. + +The `$ npm run test-fuzz` script does fuzz testing for 15 minutes. +The timeout is provided by GNU coreutils **timeout(1)**, which might not be +available on your system. +Either install it or remove it from the script. + +## Size & debug + +micromark is really small. +A ton of time went into making sure it minifies well, by the way code is written +but also through custom build scripts to pre-evaluate certain expressions. +Furthermore, care went into making it compress well with GZip and Brotli. + +Normally, you’ll use the pre-evaluated version of micromark, which is published +in the `dist/` folder and has entries in the root. +While developing or debugging, you can switch to use the source, which is +published in the `lib/` folder, and comes instrumented with assertions and debug +messages. +To see debug messages, run your script with a `DEBUG` env variable, such as with +`DEBUG="micromark" node script.js`. + +To generate the codebase, use `$ npm run generate` after clone and install. +The `$ npm run generate-dist` script specifically takes `lib/` and generates +`dist/`. +The `$ npm run generate-size` script checks the bundle size of `dist/`. + +## Comparison + +There are many other markdown parsers out there, and maybe they’re better suited +to your use case! +Here is a short comparison of a couple of ’em in JavaScript. +Note that this list is made by the folks who make `micromark` and `remark`, so +there is some bias. + +**Note**: these are, in fact, not really comparable: micromark (and remark) +focus on completely different things than other markdown parsers do. +Sure, you can generate HTML from markdown with them, but micromark (and remark) +are created for (abstract or concrete) syntax trees—to inspect, transform, and +generate content, so that you can make things like [MDX][], [Prettier][], or +[Gatsby][]. + +###### micromark + +micromark can be used in two different ways. +It can either be used, optionally with existing extensions, to get HTML pretty +easily. +Or, it can give tremendous power, such as access to all tokens with positional +info, at the cost of being hard to get into. +It’s super small, pretty fast, and has 100% CommonMark compliance. 
+It has syntax extensions, such as supporting 100% GFM compliance (with +`micromark-extension-gfm`), but they’re rather complex to write. +It’s the newest parser on the block. + +If you’re looking for fine grained control, use micromark. + +###### remark + +[remark][] is the most popular markdown parser. +It’s built on top of `micromark` and boasts syntax trees. +For an analogy, it’s like if Babel, ESLint, and more, were one project. +It supports the syntax extensions that micromark has (so it’s 100% CM compliant +and can be 100% GFM compliant), but most of the work is done in plugins that +transform or inspect the tree. +Transforming the tree is relatively easy: it’s a JSON object that can be +manipulated directly. +remark is stable, widely used, and extremely powerful for handling complex data. + +If you’re looking to inspect or transform lots of content, use [remark][]. + +###### marked + +[marked][] is the oldest markdown parser on the block. +It’s been around for ages, is battle tested, small, popular, and has a bunch of +extensions, but doesn’t match CommonMark or GFM, and is unsafe by default. + +If you have markdown you trust and want to turn it into HTML without a fuss, use +[marked][]. + +###### markdown-it + +[markdown-it][] is a good, stable, and essentially CommonMark compliant markdown +parser, with (optional) support for some GFM features as well. +It’s used a lot as a direct dependency in packages, but is rather big. +It shines at syntax extensions, where you want to support not just markdown, but +*your* (company’s) version of markdown. + +If you’re in Node and have CommonMark-compliant (or funky) markdown and want to +turn it into HTML, use [markdown-it][]. + +###### Others + +There are lots of other markdown parsers! +Some say they’re small, or fast, or that they’re CommonMark compliant — but +that’s not always true. +This list is not supposed to be exhaustive. +This list of markdown parsers is a snapshot in time of why (not) to use +(alternatives to) `micromark`: they’re all good choices, depending on what your +goals are. + +## Version + +The open beta of micromark starts at version `2.0.0` (there was a different +package published on npm as `micromark` before). +micromark will adhere to semver at `3.0.0`. +Use tilde ranges for now: `"micromark": "~2.10.1"`. + +## Security + +The typical security aspect discussed for markdown is [cross-site scripting +(XSS)][xss] attacks. +It’s safe to compile markdown to HTML if it does not include embedded HTML nor +uses dangerous protocols in links (such as `javascript:` or `data:`). +micromark is safe by default when embedded HTML or dangerous protocols are used +too, as it encodes or drops them. +Turning on the `allowDangerousHtml` or `allowDangerousProtocol` options for +user-provided markdown opens you up to XSS attacks. + +Another aspect is DDoS attacks. +For example, an attacker could throw a 100mb file at micromark, in which case +the JavaScript engine will run out of memory and crash. +It is also possible to crash micromark with smaller payloads, notably when +thousands of links, images, emphasis, or strong are opened but not closed. +It is wise to cap the accepted size of input (500kb can hold a big book) and to +process content in a different thread or worker so that it can be stopped when +needed. + +Using extensions might also be unsafe, refer to their documentation for more +information. + +For more information on markdown sanitation, see +[`improper-markup-sanitization.md`][improper] by [**@chalker**][chalker]. 
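+
+As a quick illustration of the safe defaults described above (the exact
+output is elided here; run it to see how embedded HTML is encoded rather
+than passed through):
+
+```js
+var micromark = require('micromark')
+
+// Safe by default: embedded HTML is encoded.
+console.log(micromark('<script>alert(1)</script>'))
+
+// Opting in to raw HTML is only sensible for markdown you trust.
+console.log(micromark('<script>alert(1)</script>', {allowDangerousHtml: true}))
+```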
+ +See [`security.md`][securitymd] in [`micromark/.github`][health] for how to +submit a security report. + +## Contribute + +See [`contributing.md`][contributing] in [`micromark/.github`][health] for ways +to get started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organisation, or community you agree to +abide by its terms. + +## Sponsor + +Support this effort and give back by sponsoring on [OpenCollective][]! + +<table> +<tr valign="middle"> +<td width="100%" align="center" colspan="10"> + <br> + <a href="https://www.salesforce.com">Salesforce</a> 🏅<br><br> + <a href="https://www.salesforce.com"><img src="https://images.opencollective.com/salesforce/ca8f997/logo/512.png" width="256"></a> +</td> +</tr> +<tr valign="middle"> +<td width="20%" align="center" colspan="2"> + <a href="https://www.gatsbyjs.org">Gatsby</a> 🥇<br><br> + <a href="https://www.gatsbyjs.org"><img src="https://avatars1.githubusercontent.com/u/12551863?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://vercel.com">Vercel</a> 🥇<br><br> + <a href="https://vercel.com"><img src="https://avatars1.githubusercontent.com/u/14985020?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://www.netlify.com">Netlify</a><br><br> + <!--OC has a sharper image--> + <a href="https://www.netlify.com"><img src="https://images.opencollective.com/netlify/4087de2/logo/256.png" width="128"></a> +</td> +<td width="10%" align="center"> + <a href="https://www.holloway.com">Holloway</a><br><br> + <a href="https://www.holloway.com"><img src="https://avatars1.githubusercontent.com/u/35904294?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://themeisle.com">ThemeIsle</a><br><br> + <a href="https://themeisle.com"><img src="https://avatars1.githubusercontent.com/u/58979018?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://boosthub.io">Boost Hub</a><br><br> + <a href="https://boosthub.io"><img src="https://images.opencollective.com/boosthub/6318083/logo/128.png" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://expo.io">Expo</a><br><br> + <a href="https://expo.io"><img src="https://avatars1.githubusercontent.com/u/12504344?s=128&v=4" width="64"></a> +</td> +</tr> +<tr valign="middle"> +<td width="100%" align="center" colspan="10"> + <br> + <a href="https://opencollective.com/unified"><strong>You?</strong></a> + <br><br> +</td> +</tr> +</table> + +## Origin story + +Over the summer of 2018, micromark was planned, and the idea shared in August +with a couple of friends and potential sponsors. +The problem I (**[@wooorm][]**) had was that issues were piling up in remark and +other repos, but my day job (teaching) was fun, fulfilling, and deserved time +too. +It was getting hard to combine the two. +The thought was to feed two birds with one scone: fix the issues in remark with +a new markdown parser (codename marydown) while being financially supported by +sponsors building fancy stuff on top, such as Gatsby, Contentful, and Vercel +(ZEIT at the time). +**[@johno][]** was making MDX on top of remark at the time (important historical +note: several other folks were working on JSX + markdown too). +We bundled our strengths: MDX was getting some traction and we thought together +we could perhaps make something sustainable. 
+ +In November 2018, we launched with the idea for micromark to solve all existing +bugs, sustaining the existing hundreds of projects, and furthering the exciting +high-level project MDX. +We pushed a single name: unified (which back then was a small but essential +part of the chain). +Gatsby and Vercel were immediate sponsors. +We didn’t know whether it would work, and it worked. +But now you have a new problem: you are getting some financial support (much +more than other open source projects) but it’s not enough money for rent, and +too much money to print stickers with. +You still have your job and issues are still piling up. + +At the start of summer 2019, after a couple months of saving up donations, I +quit my job and worked on unified through fall. +That got the number of open issues down significantly and set up a strong +governance and maintenance system for the collective. +But when the time came to work on micromark, the money was gone again, so I +contracted through winter 2019, and in spring 2020 I could do about half open +source, half contracting. +One of the contracting gigs was to write a new MDX parser, for which I also +documented how to do that with a state machine [in prose][mdx-cmsm]. +That gave me the insight into how the same could be done for markdown: I drafted +[CMSM][], which was some of the core ideas for micromark, but in prose. + +In May 2020, Salesforce reached out: they saw the bugs in remark, how micromark +could help, and the initial work on CMSM. +And they had thousands of Markdown files. +In a for open source uncharacteristic move, they decided to fund my work on +micromark. +A large part of what maintaining open source means, is putting out fires, +triaging issues, and making sure users and sponsors are happy, so it was +amazing to get several months to just focus and make something new. +I remember feeling that this project would probably be the hardest thing I’d +work on: yeah, parsers are pretty difficult, but markdown is on another level. +Markdown is such a giant stack of edge cases on edge cases on even more +weirdness, what a mess. +On August 20, 2020, I released [2.0.0][200], the first working version of +micromark. +And it’s hard to describe how that moment felt. +It was great. 
+ +## License + +[MIT][license] © [Titus Wormer][author] + +<!-- Definitions --> + +[build-badge]: https://github.com/micromark/micromark/workflows/main/badge.svg + +[build]: https://github.com/micromark/micromark/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark.svg + +[coverage]: https://codecov.io/github/micromark/micromark + +[downloads-badge]: https://img.shields.io/npm/dm/micromark.svg + +[downloads]: https://www.npmjs.com/package/micromark + +[bundle-size-badge]: https://img.shields.io/bundlephobia/minzip/micromark.svg + +[bundle-size]: https://bundlephobia.com/result?p=micromark + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[opencollective]: https://opencollective.com/unified + +[npm]: https://docs.npmjs.com/cli/install + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/micromark/micromark/discussions + +[license]: license + +[author]: https://wooorm.com + +[health]: https://github.com/micromark/.github + +[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting + +[securitymd]: https://github.com/micromark/.github/blob/HEAD/security.md + +[contributing]: https://github.com/micromark/.github/blob/HEAD/contributing.md + +[support]: https://github.com/micromark/.github/blob/HEAD/support.md + +[coc]: https://github.com/micromark/.github/blob/HEAD/code-of-conduct.md + +[twitter]: https://twitter.com/unifiedjs + +[remark]: https://github.com/remarkjs/remark + +[site]: https://unifiedjs.com + +[contribute]: #contribute + +[encoding]: https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + +[buffer]: https://nodejs.org/api/buffer.html + +[commonmark-spec]: https://commonmark.org + +[popular]: https://www.npmtrends.com/remark-parse-vs-marked-vs-markdown-it + +[remark-parse]: https://unifiedjs.com/explore/package/remark-parse/ + +[improper]: https://github.com/ChALkeR/notes/blob/master/Improper-markup-sanitization.md + +[chalker]: https://github.com/ChALkeR + +[cmsm]: https://github.com/micromark/common-markup-state-machine + +[mdx-cmsm]: https://github.com/micromark/mdx-state-machine + +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown + +[directives]: https://github.com/micromark/micromark-extension-directive + +[footnotes]: https://github.com/micromark/micromark-extension-footnote + +[frontmatter]: https://github.com/micromark/micromark-extension-frontmatter + +[gfm]: https://github.com/micromark/micromark-extension-gfm + +[math]: https://github.com/micromark/micromark-extension-math + +[mdxjs]: https://github.com/micromark/micromark-extension-mdxjs + +[constructs]: lib/constructs.mjs + +[extensions]: #list-of-extensions + +[syntax-extension]: #syntaxextension + +[html-extension]: #htmlextension + +[option-extensions]: #optionsextensions + +[option-htmlextensions]: #optionshtmlextensions + +[marked]: https://github.com/markedjs/marked + +[markdown-it]: https://github.com/markdown-it/markdown-it + +[mdx]: https://github.com/mdx-js/mdx + +[prettier]: https://github.com/prettier/prettier + +[gatsby]: https://github.com/gatsbyjs/gatsby + +[commonmark]: #commonmark + +[size]: #size--debug + +[test]: #test + +[security]: #security + +[sponsor]: #sponsor + +[@wooorm]: https://github.com/wooorm + +[@johno]: https://github.com/johno + +[200]: https://github.com/micromark/micromark/releases/tag/2.0.0 diff --git a/node_modules/micromark/stream.d.ts 
b/node_modules/micromark/stream.d.ts new file mode 100644 index 00000000..5bf02ebb --- /dev/null +++ b/node_modules/micromark/stream.d.ts @@ -0,0 +1,5 @@ +// Minimum TypeScript Version: 3.0 + +import stream from './dist/stream' + +export default stream diff --git a/node_modules/micromark/stream.js b/node_modules/micromark/stream.js new file mode 100644 index 00000000..e90ff8f6 --- /dev/null +++ b/node_modules/micromark/stream.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('./dist/stream.js') diff --git a/node_modules/micromark/stream.mjs b/node_modules/micromark/stream.mjs new file mode 100644 index 00000000..e33b2288 --- /dev/null +++ b/node_modules/micromark/stream.mjs @@ -0,0 +1 @@ +export {default} from './dist/stream.js' diff --git a/node_modules/parse-entities/index.js b/node_modules/parse-entities/index.js index 1606d02f..106d6d86 100644 --- a/node_modules/parse-entities/index.js +++ b/node_modules/parse-entities/index.js @@ -30,15 +30,15 @@ var defaults = { // Characters. var tab = 9 // '\t' var lineFeed = 10 // '\n' -var formFeed = 12 // '\f' +var formFeed = 12 // '\f' var space = 32 // ' ' -var ampersand = 38 // '&' -var semicolon = 59 // ';' -var lessThan = 60 // '<' -var equalsTo = 61 // '=' -var numberSign = 35 // '#' -var uppercaseX = 88 // 'X' -var lowercaseX = 120 // 'x' +var ampersand = 38 // '&' +var semicolon = 59 // ';' +var lessThan = 60 // '<' +var equalsTo = 61 // '=' +var numberSign = 35 // '#' +var uppercaseX = 88 // 'X' +var lowercaseX = 120 // 'x' var replacementCharacter = 65533 // '�' // Reference types. @@ -160,7 +160,8 @@ function parse(value, settings) { // Wrap `handleWarning`. warning = handleWarning ? parseError : noop - // Ensure the algorithm walks over the first character and the end (inclusive). + // Ensure the algorithm walks over the first character and the end + // (inclusive). index-- length++ @@ -393,7 +394,7 @@ function parse(value, settings) { } } - // Return the reduced nodes, and any possible warnings. + // Return the reduced nodes. return result.join('') // Get current position. 
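For context on the new `stream.js`, `stream.mjs`, and `stream.d.ts` entry points added above: they only re-export micromark’s streaming interface from `dist/stream`. A minimal usage sketch (not part of this patch, assuming micromark 2.x as vendored here):

```js
// Sketch: using the `micromark/stream` entry point added above (micromark 2.x).
var stream = require('micromark/stream')

// Pipe markdown in, get HTML out, without buffering the whole document.
process.stdin.pipe(stream()).pipe(process.stdout)
```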
diff --git a/node_modules/parse-entities/package.json b/node_modules/parse-entities/package.json index a5e1bc46..60e191ff 100644 --- a/node_modules/parse-entities/package.json +++ b/node_modules/parse-entities/package.json @@ -1,6 +1,6 @@ { "name": "parse-entities", - "version": "1.2.2", + "version": "2.0.0", "description": "Parse HTML character references: fast, spec-compliant, positional information", "license": "MIT", "keywords": [ @@ -13,6 +13,10 @@ ], "repository": "wooorm/parse-entities", "bugs": "https://github.com/wooorm/parse-entities/issues", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + }, "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" @@ -26,8 +30,10 @@ "files": [ "index.js", "decode-entity.js", - "decode-entity.browser.js" + "decode-entity.browser.js", + "types/index.d.ts" ], + "types": "types/index.d.ts", "dependencies": { "character-entities": "^1.0.0", "character-entities-legacy": "^1.0.0", @@ -38,24 +44,26 @@ }, "devDependencies": { "browserify": "^16.0.0", - "nyc": "^14.0.0", - "prettier": "^1.12.1", - "remark-cli": "^6.0.0", - "remark-preset-wooorm": "^4.0.0", - "tape": "^4.2.0", + "dtslint": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^1.0.0", + "remark-cli": "^7.0.0", + "remark-preset-wooorm": "^6.0.0", + "tape": "^4.0.0", "tape-run": "^6.0.0", - "tinyify": "^2.4.3", - "xo": "^0.24.0" + "tinyify": "^2.0.0", + "xo": "^0.25.0" }, "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", + "format": "remark . -qfo && prettier --write \"**/*.{js,ts}\" && xo --fix", "build-bundle": "browserify . -s parseEntities > parse-entities.js", "build-mangle": "browserify . -s parseEntities -p tinyify > parse-entities.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", "test-browser": "browserify test.js | tape-run", - "test": "npm run format && npm run build && npm run test-coverage && npm run test-browser" + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "nyc": { "check-coverage": true, diff --git a/node_modules/parse-entities/readme.md b/node_modules/parse-entities/readme.md index e9cc0f03..5ca60e7a 100644 --- a/node_modules/parse-entities/readme.md +++ b/node_modules/parse-entities/readme.md @@ -8,15 +8,15 @@ Parse HTML character references: fast, spec-compliant, positional information. -## Installation +## Install [npm][]: -```bash +```sh npm install parse-entities ``` -## Usage +## Use ```js var decode = require('parse-entities') diff --git a/node_modules/parse-entities/types/index.d.ts b/node_modules/parse-entities/types/index.d.ts new file mode 100644 index 00000000..1a494d7d --- /dev/null +++ b/node_modules/parse-entities/types/index.d.ts @@ -0,0 +1,157 @@ +// TypeScript Version: 3.4 + +declare namespace parseEntities { + interface ParseEntitiesOptions< + WC = typeof globalThis, + TC = typeof globalThis, + RC = typeof globalThis + > { + /** + * Additional character to accept (`string?`, default: `''`). + * This allows other characters, without error, when following an ampersand. + */ + additional: string + + /** + * Whether to parse `value` as an attribute value (`boolean?`, default: `false`). + */ + attribute: boolean + + /** + * Whether to allow non-terminated entities (`boolean`, default: `true`). + * For example, `&copycat` for `©cat`. 
This behaviour is spec-compliant but can lead to unexpected results. + */ + nonTerminated: boolean + + /** + * Error handler (`Function?`). + */ + warning: ErrorHandler<WC> + + /** + * Text handler (`Function?`). + */ + text: TextHandler<TC> + + /** + * Reference handler (`Function?`). + */ + reference: ReferenceHandler<RC> + + /** + * Context used when invoking `warning` (`'*'`, optional). + */ + warningContext: WC + + /** + * Context used when invoking `text` (`'*'`, optional). + */ + textContext: TC + + /** + * Context used when invoking `reference` (`'*'`, optional) + */ + referenceContext: RC + + /** + * Starting `position` of `value` (`Location` or `Position`, optional). Useful when dealing with values nested in some sort of syntax tree. + */ + position: Position + } + + /** + * Error handler. + */ + type ErrorHandler<C> = ( + /** + * `this` refers to `warningContext` when given to `parseEntities`. + */ + this: C, + + /** + * Human-readable reason for triggering a parse error (`string`). + */ + reason: string, + + /** + * Place at which the parse error occurred (`Position`). + */ + position: Position, + + /** + * Identifier of reason for triggering a parse error (`number`). + */ + code: number + ) => void + + /** + * Text handler. + */ + type TextHandler<C> = ( + /** + * `this` refers to `textContext` when given to `parseEntities`. + */ + this: C, + + /** + * String of content (`string`). + */ + value: string, + + /** + * Location at which `value` starts and ends (`Location`). + */ + location: Location + ) => void + + /** + * Character reference handler. + */ + type ReferenceHandler<C> = ( + /** + * `this` refers to `textContext` when given to `parseEntities`. + */ + this: C, + + /** + * String of content (`string`). + */ + value: string, + + /** + * Location at which `value` starts and ends (`Location`). + */ + location: Location, + + /** + * Source of character reference (`Location`). + */ + source: Location + ) => void + + interface Position { + line: number + column: number + offset: number + indent?: number[] + } + + interface Location { + start: Position + end: Position + } +} + +/** + * Decode special characters in `value`. 
+ */ +declare function parseEntities< + WC = typeof globalThis, + TC = typeof globalThis, + RC = typeof globalThis +>( + value: string, + options?: Partial<parseEntities.ParseEntitiesOptions<WC, TC, RC>> +): string + +export = parseEntities diff --git a/node_modules/picomatch/CHANGELOG.md b/node_modules/picomatch/CHANGELOG.md index 1301f3af..b9b9554d 100755 --- a/node_modules/picomatch/CHANGELOG.md +++ b/node_modules/picomatch/CHANGELOG.md @@ -32,6 +32,12 @@ Changelog entries are classified using the following labels _(from [keep-a-chang </details> +## 2.3.0 (2021-05-21) + +### Fixed + +* Fixes bug where file names with two dots were not being matched consistently with negation extglobs containing a star ([56083ef](https://github.com/micromatch/picomatch/commit/56083ef)) + ## 2.2.3 (2021-04-10) ### Fixed diff --git a/node_modules/picomatch/README.md b/node_modules/picomatch/README.md index 85f4bfb1..54822d49 100755 --- a/node_modules/picomatch/README.md +++ b/node_modules/picomatch/README.md @@ -1,18 +1,18 @@ <h1 align="center">Picomatch</h1> <p align="center"> - <a href="https://npmjs.org/package/picomatch"> - <img src="https://img.shields.io/npm/v/picomatch.svg" alt="version"> - </a> - <a href="https://github.com/micromatch/picomatch/actions?workflow=Tests"> - <img src="https://github.com/micromatch/picomatch/workflows/Tests/badge.svg" alt="test status"> - </a> - <a href="https://coveralls.io/github/micromatch/picomatch"> - <img src="https://img.shields.io/coveralls/github/micromatch/picomatch/master.svg" alt="coverage status"> - </a> - <a href="https://npmjs.org/package/picomatch"> - <img src="https://img.shields.io/npm/dm/picomatch.svg" alt="downloads"> - </a> +<a href="https://npmjs.org/package/picomatch"> +<img src="https://img.shields.io/npm/v/picomatch.svg" alt="version"> +</a> +<a href="https://github.com/micromatch/picomatch/actions?workflow=Tests"> +<img src="https://github.com/micromatch/picomatch/workflows/Tests/badge.svg" alt="test status"> +</a> +<a href="https://coveralls.io/github/micromatch/picomatch"> +<img src="https://img.shields.io/coveralls/github/micromatch/picomatch/master.svg" alt="coverage status"> +</a> +<a href="https://npmjs.org/package/picomatch"> +<img src="https://img.shields.io/npm/dm/picomatch.svg" alt="downloads"> +</a> </p> <br> @@ -54,6 +54,7 @@ See the [library comparison](#library-comparisons) to other libraries. * [.parse](#parse) * [.scan](#scan) * [.compileRe](#compilere) + * [.makeRe](#makere) * [.toRegex](#toregex) - [Options](#options) * [Picomatch options](#picomatch-options) @@ -234,7 +235,20 @@ console.log(result); negated: true } ``` -### [.compileRe](lib/picomatch.js#L250) +### [.compileRe](lib/picomatch.js#L245) + +Compile a regular expression from the `state` object returned by the +[parse()](#parse) method. + +**Params** + +* `state` **{Object}** +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. +* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. +* `returns` **{RegExp}** + +### [.makeRe](lib/picomatch.js#L286) Create a regular expression from a parsed glob pattern. @@ -242,6 +256,8 @@ Create a regular expression from a parsed glob pattern. * `state` **{String}**: The object returned from the `.parse` method. 
* `options` **{Object}** +* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. +* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. * `returns` **{RegExp}**: Returns a regex created from the given pattern. **Example** @@ -255,7 +271,7 @@ console.log(picomatch.compileRe(state)); //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ ``` -### [.toRegex](lib/picomatch.js#L318) +### [.toRegex](lib/picomatch.js#L321) Create a regular expression from the given regex source string. @@ -562,7 +578,7 @@ If you wish to match the following special characters in a filepath, and you wan Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms. -To match any of the following characters as literals: `$^*+?()[]` +To match any of the following characters as literals: `$^*+?()[] Examples: @@ -688,4 +704,4 @@ npm install -g verbose/verb#dev verb-generate-readme && verb ### License Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). +Released under the [MIT License](LICENSE). \ No newline at end of file diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js index bb8e0431..c16d59d9 100755 --- a/node_modules/picomatch/lib/parse.js +++ b/node_modules/picomatch/lib/parse.js @@ -92,7 +92,7 @@ const parse = (input, options) => { START_ANCHOR } = PLATFORM_CHARS; - const globstar = (opts) => { + const globstar = opts => { return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; @@ -142,12 +142,13 @@ const parse = (input, options) => { const eos = () => state.index === len - 1; const peek = state.peek = (n = 1) => input[state.index + n]; - const advance = state.advance = () => input[++state.index]; + const advance = state.advance = () => input[++state.index] || ''; const remaining = () => input.slice(state.index + 1); const consume = (value = '', num = 0) => { state.consumed += value; state.index += num; }; + const append = token => { state.output += token.output != null ? token.output : token.value; consume(token.value); @@ -203,7 +204,7 @@ const parse = (input, options) => { } } - if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) { + if (extglobs.length && tok.type !== 'paren') { extglobs[extglobs.length - 1].inner += tok.value; } @@ -235,6 +236,7 @@ const parse = (input, options) => { const extglobClose = token => { let output = token.close + (opts.capture ? 
')' : ''); + let rest; if (token.type === 'negate') { let extglobStar = star; @@ -247,6 +249,10 @@ const parse = (input, options) => { output = token.close = `)$))${extglobStar}`; } + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + output = token.close = `)${rest})${extglobStar})`; + } + if (token.prev.type === 'bos') { state.negatedExtglob = true; } @@ -356,9 +362,9 @@ const parse = (input, options) => { } if (opts.unescape === true) { - value = advance() || ''; + value = advance(); } else { - value += advance() || ''; + value += advance(); } if (state.brackets === 0) { @@ -1022,7 +1028,7 @@ parse.fastpaths = (input, options) => { star = `(${star})`; } - const globstar = (opts) => { + const globstar = opts => { if (opts.noglobstar === true) return star; return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js index df7438a9..782d8094 100755 --- a/node_modules/picomatch/lib/picomatch.js +++ b/node_modules/picomatch/lib/picomatch.js @@ -231,68 +231,71 @@ picomatch.parse = (pattern, options) => { picomatch.scan = (input, options) => scan(input, options); /** - * Create a regular expression from a parsed glob pattern. - * - * ```js - * const picomatch = require('picomatch'); - * const state = picomatch.parse('*.js'); - * // picomatch.compileRe(state[, options]); + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. * - * console.log(picomatch.compileRe(state)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `state` * @param {Object} `options` - * @return {RegExp} Returns a regex created from the given pattern. + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} * @api public */ -picomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => { +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { if (returnOutput === true) { - return parsed.output; + return state.output; } const opts = options || {}; const prepend = opts.contains ? '' : '^'; const append = opts.contains ? '' : '$'; - let source = `${prepend}(?:${parsed.output})${append}`; - if (parsed && parsed.negated === true) { + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { source = `^(?!${source}).*$`; } const regex = picomatch.toRegex(source, options); if (returnState === true) { - regex.state = parsed; + regex.state = state; } return regex; }; -picomatch.makeRe = (input, options, returnOutput = false, returnState = false) => { +/** + * Create a regular expression from a parsed glob pattern. + * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. 
This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { if (!input || typeof input !== 'string') { throw new TypeError('Expected a non-empty string'); } - const opts = options || {}; let parsed = { negated: false, fastpaths: true }; - let prefix = ''; - let output; - - if (input.startsWith('./')) { - input = input.slice(2); - prefix = parsed.prefix = './'; - } - if (opts.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { - output = parse.fastpaths(input, options); + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); } - if (output === undefined) { + if (!parsed.output) { parsed = parse(input, options); - parsed.prefix = prefix + (parsed.prefix || ''); - } else { - parsed.output = output; } return picomatch.compileRe(parsed, options, returnOutput, returnState); diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js index 456c2f85..e59cd7a1 100755 --- a/node_modules/picomatch/lib/scan.js +++ b/node_modules/picomatch/lib/scan.js @@ -32,7 +32,8 @@ const depth = token => { /** * Quickly scans a glob pattern and returns an object with a handful of * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), - * `glob` (the actual pattern), and `negated` (true if the path starts with `!`). + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
* * ```js * const pm = require('picomatch'); @@ -66,6 +67,7 @@ const scan = (input, options) => { let braceEscaped = false; let backslashes = false; let negated = false; + let negatedExtglob = false; let finished = false; let braces = 0; let prev; @@ -177,6 +179,9 @@ const scan = (input, options) => { isGlob = token.isGlob = true; isExtglob = token.isExtglob = true; finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } if (scanToEnd === true) { while (eos() !== true && (code = advance())) { @@ -330,7 +335,8 @@ const scan = (input, options) => { isGlob, isExtglob, isGlobstar, - negated + negated, + negatedExtglob }; if (opts.tokens === true) { diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json index 9be45561..4c8acf0b 100755 --- a/node_modules/picomatch/package.json +++ b/node_modules/picomatch/package.json @@ -1,7 +1,7 @@ { "name": "picomatch", "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", - "version": "2.2.3", + "version": "2.3.0", "homepage": "https://github.com/micromatch/picomatch", "author": "Jon Schlinkert (https://github.com/jonschlinkert)", "funding": "https://github.com/sponsors/jonschlinkert", diff --git a/node_modules/remark-footnotes/index.js b/node_modules/remark-footnotes/index.js new file mode 100644 index 00000000..04c0ffdf --- /dev/null +++ b/node_modules/remark-footnotes/index.js @@ -0,0 +1,38 @@ +'use strict' + +var syntax = require('micromark-extension-footnote') +var fromMarkdown = require('mdast-util-footnote/from-markdown') +var toMarkdown = require('mdast-util-footnote/to-markdown') +var warningIssued + +module.exports = footnotes + +function footnotes(options) { + var data = this.data() + + /* istanbul ignore next - old remark. */ + if ( + !warningIssued && + ((this.Parser && + this.Parser.prototype && + this.Parser.prototype.blockTokenizers) || + (this.Compiler && + this.Compiler.prototype && + this.Compiler.prototype.visitors)) + ) { + warningIssued = true + console.warn( + '[remark-footnotes] Warning: please upgrade to remark 13 to use this plugin' + ) + } + + add('micromarkExtensions', syntax(options)) + add('fromMarkdownExtensions', fromMarkdown) + add('toMarkdownExtensions', toMarkdown) + + function add(field, value) { + /* istanbul ignore if - other extensions. */ + if (data[field]) data[field].push(value) + else data[field] = [value] + } +} diff --git a/node_modules/remark-footnotes/license b/node_modules/remark-footnotes/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/remark-footnotes/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer <tituswormer@gmail.com> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/remark-footnotes/package.json b/node_modules/remark-footnotes/package.json new file mode 100644 index 00000000..6244e70d --- /dev/null +++ b/node_modules/remark-footnotes/package.json @@ -0,0 +1,91 @@ +{ + "name": "remark-footnotes", + "version": "3.0.0", + "description": "remark plugin to add support for pandoc footnotes", + "license": "MIT", + "keywords": [ + "unified", + "remark", + "remark-plugin", + "plugin", + "mdast", + "markdown", + "footnote", + "note", + "definition", + "pandoc" + ], + "repository": "remarkjs/remark-footnotes", + "bugs": "https://github.com/remarkjs/remark-footnotes/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", + "contributors": [ + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" + ], + "types": "types/index.d.ts", + "files": [ + "types/index.d.ts", + "index.js" + ], + "dependencies": { + "mdast-util-footnote": "^0.1.0", + "micromark-extension-footnote": "^0.3.0" + }, + "devDependencies": { + "dtslint": "^4.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "rehype-format": "^3.0.0", + "rehype-stringify": "^8.0.0", + "remark-cli": "^8.0.0", + "remark-parse": "^9.0.0-alpha.1", + "remark-preset-wooorm": "^7.0.0", + "remark-rehype": "^8.0.0", + "remark-stringify": "^9.0.0-alpha.1", + "tape": "^5.0.0", + "to-vfile": "^6.0.0", + "unified": "^9.0.0", + "unist-builder": "^2.0.0", + "unist-util-remove-position": "^3.0.0", + "xo": "^0.33.0" + }, + "scripts": { + "format": "remark . -qfo && prettier . --write && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test-types": "dtslint types", + "test": "npm run format && npm run test-coverage && npm run test-types" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "unicorn/prefer-optional-catch-binding": "off", + "complexity": "off", + "no-self-compare": "off", + "no-lonely-if": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/remark-footnotes/readme.md b/node_modules/remark-footnotes/readme.md new file mode 100644 index 00000000..28b099f2 --- /dev/null +++ b/node_modules/remark-footnotes/readme.md @@ -0,0 +1,223 @@ +# remark-footnotes + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +[**remark**][remark] plugin to add support for footnotes. + +## Important! 
+ +This plugin is affected by the new parser in remark +([`micromark`](https://github.com/micromark/micromark), +see [`remarkjs/remark#536`](https://github.com/remarkjs/remark/pull/536)). +Use version 2 while you’re still on remark 12. +Use version 3 for remark 13+. + +## Install + +[npm][]: + +```sh +npm install remark-footnotes +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown +Here is a footnote reference,[^1] +another,[^longnote], +and optionally there are inline +notes.^[you can type them inline, which may be easier, since you don’t +have to pick an identifier and move down to type the note.] + +[^1]: Here is the footnote. + +[^longnote]: Here’s one with multiple blocks. + + Subsequent paragraphs are indented to show that they +belong to the previous footnote. + + { some.code } + + The whole paragraph can be indented, or just the first + line. In this way, multi-paragraph footnotes work like + multi-paragraph list items. + +This paragraph won’t be part of the note, because it +isn’t indented. +``` + +And our script, `example.js`, looks as follows: + +```js +var vfile = require('to-vfile') +var unified = require('unified') +var markdown = require('remark-parse') +var remark2rehype = require('remark-rehype') +var format = require('rehype-format') +var html = require('rehype-stringify') +var footnotes = require('remark-footnotes') + +unified() + .use(markdown) + .use(footnotes, {inlineNotes: true}) + .use(remark2rehype) + .use(format) + .use(html) + .process(vfile.readSync('example.md'), function (err, file) { + if (err) throw err + console.log(String(file)) + }) +``` + +Now, running `node example` yields: + +```html +<p> + Here is a footnote reference,<sup id="fnref-1"><a href="#fn-1" class="footnote-ref">1</a></sup> + another,<sup id="fnref-longnote"><a href="#fn-longnote" class="footnote-ref">longnote</a></sup>, + and optionally there are inline + notes.<sup id="fnref-2"><a href="#fn-2" class="footnote-ref">2</a></sup> +</p> +<p> + This paragraph won’t be part of the note, because it + isn’t indented. +</p> +<div class="footnotes"> + <hr> + <ol> + <li id="fn-1"> + <p>Here is the footnote.<a href="#fnref-1" class="footnote-backref">↩</a></p> + </li> + <li id="fn-longnote"> + <p>Here’s one with multiple blocks.</p> + <p> + Subsequent paragraphs are indented to show that they + belong to the previous footnote. + </p> + <pre><code>{ some.code } +</code></pre> + <p> + The whole paragraph can be indented, or just the first + line. In this way, multi-paragraph footnotes work like + multi-paragraph list items.<a href="#fnref-longnote" class="footnote-backref">↩</a> + </p> + </li> + <li id="fn-2"> + <p> + you can type them inline, which may be easier, since you don’t + have to pick an identifier and move down to type the note.<a href="#fnref-2" class="footnote-backref">↩</a> + </p> + </li> + </ol> +</div> +``` + +## API + +### `remark().use(footnotes[, options])` + +Plugin to add support for footnotes. + +###### `options.inlineNotes` + +Whether to support `^[inline notes]` (`boolean`, default: `false`). +Passed to [`micromark-extension-footnote`][mm-footnote]. 
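As a quick illustration of the option above, a minimal sketch (not part of the vendored readme; it assumes remark 13 with the remark-footnotes 3 files added in this patch). Under the hood the plugin registers its micromark and mdast extensions through `this.data()`, as shown in its `index.js` earlier in this diff:

```js
// Hypothetical usage sketch, assuming remark 13 and remark-footnotes 3.
var remark = require('remark')
var footnotes = require('remark-footnotes')

remark()
  // Registers `micromarkExtensions`, `fromMarkdownExtensions`, and
  // `toMarkdownExtensions` on the processor data (see index.js above).
  .use(footnotes, {inlineNotes: true})
  .process('A note.^[Typed inline.]', function (err, file) {
    if (err) throw err
    console.log(String(file))
  })
```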
+ +###### Notes + +* Labels, such as `[^this]` (in a footnote reference) or `[^this]:` (in a + footnote definition) work like link references +* Footnote definitions work like lists +* Image and link references cannot start with carets, so `![^this doesn’t + work][]` + +## Security + +Use of `remark-footnotes` does not involve [**rehype**][rehype] +([**hast**][hast]) or user content so there are no openings for [cross-site +scripting (XSS)][xss] attacks. + +## Related + +* [`remark-gfm`](https://github.com/remarkjs/remark-gfm) + — GitHub Flavored Markdown +* [`remark-frontmatter`](https://github.com/remarkjs/remark-frontmatter) + — Frontmatter (YAML, TOML, and more) +* [`remark-math`](https://github.com/remarkjs/remark-math) + — Math +* [`remark-github`](https://github.com/remarkjs/remark-github) + — Auto-link references like in GitHub issues, PRs, and comments + +## Contribute + +See [`contributing.md`][contributing] in [`remarkjs/.github`][health] for ways +to get started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + +<!-- Definitions --> + +[build-badge]: https://img.shields.io/travis/remarkjs/remark-footnotes/main.svg + +[build]: https://travis-ci.org/remarkjs/remark-footnotes + +[coverage-badge]: https://img.shields.io/codecov/c/github/remarkjs/remark-footnotes.svg + +[coverage]: https://codecov.io/github/remarkjs/remark-footnotes + +[downloads-badge]: https://img.shields.io/npm/dm/remark-footnotes.svg + +[downloads]: https://www.npmjs.com/package/remark-footnotes + +[size-badge]: https://img.shields.io/bundlephobia/minzip/remark-footnotes.svg + +[size]: https://bundlephobia.com/result?p=remark-footnotes + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/remarkjs/remark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[health]: https://github.com/remarkjs/.github + +[contributing]: https://github.com/remarkjs/.github/blob/HEAD/contributing.md + +[support]: https://github.com/remarkjs/.github/blob/HEAD/support.md + +[coc]: https://github.com/remarkjs/.github/blob/HEAD/code-of-conduct.md + +[license]: license + +[author]: https://wooorm.com + +[remark]: https://github.com/remarkjs/remark + +[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting + +[rehype]: https://github.com/rehypejs/rehype + +[hast]: https://github.com/syntax-tree/hast + +[mm-footnote]: https://github.com/micromark/micromark-extension-footnote#optionsinlinenotes diff --git a/node_modules/remark-footnotes/types/index.d.ts b/node_modules/remark-footnotes/types/index.d.ts new file mode 100644 index 00000000..080b9a91 --- /dev/null +++ b/node_modules/remark-footnotes/types/index.d.ts @@ -0,0 +1,20 @@ +// TypeScript Version: 3.4 + +import {Plugin} from 'unified' + +declare namespace remarkFootnotes { + type Footnotes = Plugin<[RemarkFootnotesOptions?]> + + interface RemarkFootnotesOptions { + /** + * Whether to support `^[inline notes]` + * + * @defaultValue false + */ + inlineNotes?: boolean + } +} + +declare const remarkFootnotes: remarkFootnotes.Footnotes + +export = remarkFootnotes diff --git a/node_modules/remark-frontmatter/index.js 
b/node_modules/remark-frontmatter/index.js index 3fa9ecbb..45609b9b 100644 --- a/node_modules/remark-frontmatter/index.js +++ b/node_modules/remark-frontmatter/index.js @@ -1,63 +1,19 @@ 'use strict' -var xtend = require('xtend') -var matters = require('./lib/matters') -var parse = require('./lib/parse') -var compile = require('./lib/compile') +var syntax = require('micromark-extension-frontmatter') +var fromMarkdown = require('mdast-util-frontmatter/from-markdown') +var toMarkdown = require('mdast-util-frontmatter/to-markdown') module.exports = frontmatter function frontmatter(options) { - var parser = this.Parser - var compiler = this.Compiler - var config = matters(options || ['yaml']) - - if (isRemarkParser(parser)) { - attachParser(parser, config) - } - - if (isRemarkCompiler(compiler)) { - attachCompiler(compiler, config) - } -} - -function attachParser(parser, matters) { - var proto = parser.prototype - var tokenizers = wrap(parse, matters) - var names = [] - var key - - for (key in tokenizers) { - names.push(key) + var data = this.data() + add('micromarkExtensions', syntax(options)) + add('fromMarkdownExtensions', fromMarkdown(options)) + add('toMarkdownExtensions', toMarkdown(options)) + function add(field, value) { + /* istanbul ignore if - other extensions. */ + if (data[field]) data[field].push(value) + else data[field] = [value] } - - proto.blockMethods = names.concat(proto.blockMethods) - proto.blockTokenizers = xtend(tokenizers, proto.blockTokenizers) -} - -function attachCompiler(compiler, matters) { - var proto = compiler.prototype - proto.visitors = xtend(wrap(compile, matters), proto.visitors) -} - -function wrap(func, matters) { - var result = {} - var length = matters.length - var index = -1 - var tuple - - while (++index < length) { - tuple = func(matters[index]) - result[tuple[0]] = tuple[1] - } - - return result -} - -function isRemarkParser(parser) { - return Boolean(parser && parser.prototype && parser.prototype.blockTokenizers) -} - -function isRemarkCompiler(compiler) { - return Boolean(compiler && compiler.prototype && compiler.prototype.visitors) } diff --git a/node_modules/remark-frontmatter/lib/compile.js b/node_modules/remark-frontmatter/lib/compile.js deleted file mode 100644 index 109b6eac..00000000 --- a/node_modules/remark-frontmatter/lib/compile.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict' - -var fence = require('./fence') - -module.exports = create - -function create(matter) { - var type = matter.type - var open = fence(matter, 'open') - var close = fence(matter, 'close') - - frontmatter.displayName = type + 'FrontMatter' - - return [type, frontmatter] - - function frontmatter(node) { - return open + (node.value ? '\n' + node.value : '') + '\n' + close - } -} diff --git a/node_modules/remark-frontmatter/lib/fence.js b/node_modules/remark-frontmatter/lib/fence.js deleted file mode 100644 index 9e80c48c..00000000 --- a/node_modules/remark-frontmatter/lib/fence.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -module.exports = fence - -function fence(matter, prop) { - var marker - - if (matter.marker) { - marker = pick(matter.marker, prop) - return marker + marker + marker - } - - return pick(matter.fence, prop) -} - -function pick(schema, prop) { - return typeof schema === 'string' ? 
schema : schema[prop] -} diff --git a/node_modules/remark-frontmatter/lib/parse.js b/node_modules/remark-frontmatter/lib/parse.js deleted file mode 100644 index 28529839..00000000 --- a/node_modules/remark-frontmatter/lib/parse.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict' - -var fence = require('./fence') - -module.exports = create - -function create(matter) { - var name = matter.type + 'FrontMatter' - var open = fence(matter, 'open') - var close = fence(matter, 'close') - var newline = '\n' - var anywhere = matter.anywhere - - frontmatter.displayName = name - frontmatter.onlyAtStart = typeof anywhere === 'boolean' ? !anywhere : true - - return [name, frontmatter] - - function frontmatter(eat, value, silent) { - var index = open.length - var offset - - if (value.slice(0, index) !== open || value.charAt(index) !== newline) { - return - } - - offset = value.indexOf(close, index) - - while (offset !== -1 && value.charAt(offset - 1) !== newline) { - index = offset + close.length - offset = value.indexOf(close, index) - } - - if (offset !== -1) { - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true - } - - return eat(value.slice(0, offset + close.length))({ - type: matter.type, - value: value.slice(open.length + 1, offset - 1) - }) - } - } -} diff --git a/node_modules/remark-frontmatter/package.json b/node_modules/remark-frontmatter/package.json index 3de12d17..b00b4886 100644 --- a/node_modules/remark-frontmatter/package.json +++ b/node_modules/remark-frontmatter/package.json @@ -1,6 +1,6 @@ { "name": "remark-frontmatter", - "version": "1.3.3", + "version": "3.0.0", "description": "remark plugin to support frontmatter (yaml, toml, and more)", "license": "MIT", "keywords": [ @@ -28,35 +28,38 @@ ], "files": [ "index.js", - "lib" + "types/index.d.ts" ], + "types": "types/index.d.ts", "dependencies": { - "fault": "^1.0.1", - "xtend": "^4.0.1" + "mdast-util-frontmatter": "^0.2.0", + "micromark-extension-frontmatter": "^0.2.0" }, "devDependencies": { "browserify": "^16.0.0", + "dtslint": "^4.0.0", "is-hidden": "^1.0.0", "not": "^0.1.0", "nyc": "^15.0.0", "prettier": "^2.0.0", - "remark": "^11.0.0", - "remark-cli": "^7.0.0", - "remark-preset-wooorm": "^6.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", + "remark": "^13.0.0-alpha.1", + "remark-cli": "^8.0.0", + "remark-preset-wooorm": "^7.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", "to-vfile": "^6.0.0", - "unified": "^8.0.0", - "xo": "^0.28.0" + "unified": "^9.0.0", + "xo": "^0.33.0" }, "scripts": { - "format": "remark *.md -qfo && prettier --write \"**/*.js\" && xo --fix", + "format": "remark . -qfo --ignore-pattern test/ && prettier . --write && xo --fix", "build-bundle": "browserify . -s remarkFrontmatter > remark-frontmatter.js", "build-mangle": "browserify . 
-s remarkFrontmatter -p tinyify > remark-frontmatter.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test/index.js", - "test": "npm run format && npm run build && npm run test-coverage" + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "nyc": { "check-coverage": true, @@ -76,9 +79,12 @@ "prettier": true, "esnext": false, "rules": { + "unicorn/no-fn-reference-in-iterator": "off", + "unicorn/prefer-optional-catch-binding": "off", "guard-for-in": "off" }, "ignores": [ + "types/", "remark-frontmatter.js" ] }, diff --git a/node_modules/remark-frontmatter/readme.md b/node_modules/remark-frontmatter/readme.md index 16aac62b..bf161c4b 100644 --- a/node_modules/remark-frontmatter/readme.md +++ b/node_modules/remark-frontmatter/readme.md @@ -10,6 +10,14 @@ [**remark**][remark] plugin to support frontmatter (YAML, TOML, and more). +## Important! + +This plugin is affected by the new parser in remark +([`micromark`](https://github.com/micromark/micromark), +see [`remarkjs/remark#536`](https://github.com/remarkjs/remark/pull/536)). +Use version 2 while you’re still on remark 12. +Use version 3 for remark 13+. + ## Install [npm][]: @@ -45,9 +53,9 @@ unified() .use(stringify) .use(frontmatter, ['yaml', 'toml']) .use(logger) - .process(vfile.readSync('example.md'), function(err, file) { - console.log(String(file)) + .process(vfile.readSync('example.md'), function (err, file) { console.error(report(err || file)) + console.log(String(file)) }) function logger() { @@ -58,16 +66,17 @@ function logger() { Now, running `node example` yields: ```js -{ type: 'root', - children: - [ { type: 'toml', - value: 'title = "New Website"', - position: [Object] }, - { type: 'heading', - depth: 1, - children: [Array], - position: [Object] } ], - position: [Object] } +{ + type: 'root', + children: [ + {type: 'toml', value: 'title = "New Website"', position: [Object]}, + {type: 'heading', depth: 1, children: [Array], position: [Object]} + ], + position: { + start: {line: 1, column: 1, offset: 0}, + end: {line: 6, column: 1, offset: 48} + } +} ``` ```markdown @@ -83,117 +92,12 @@ title = "New Website" ### `remark().use(frontmatter[, options])` -Support frontmatter (YAML, TOML, and more). -Adds [tokenizers][] if the [processor][] is configured with -[`remark-parse`][parse], and [visitors][] if configured with -[`remark-stringify`][stringify]. - -If you are parsing from a different syntax, or compiling to a different syntax -(such as, [`remark-man`][man]) your custom nodes may not be supported. +Configures remark so that it can parse and serialize frontmatter (YAML, TOML, +and more). ##### `options` -One [`preset`][preset] or [`Matter`][matter], or an array of them, defining all -the supported frontmatters (default: `'yaml'`). - -##### `preset` - -Either `'yaml'` or `'toml'`: - -* `'yaml'` — [`matter`][matter] defined as `{type: 'yaml', marker: '-'}` -* `'toml'` — [`matter`][matter] defined as `{type: 'toml', marker: '+'}` - -##### `Matter` - -An object with a `type` and either a `marker` or a `fence`: - -* `type` (`string`) - — Node type to parse to in [mdast][] and compile from -* `marker` (`string` or `{open: string, close: string}`) - — Character used to construct fences. - By providing an object with `open` and `close`. - different characters can be used for opening and closing fences. 
- For example the character `'-'` will result in `'---'` being used as the - fence -* `fence` (`string` or `{open: string, close: string}`) - — String used as the complete fence. - By providing an object with `open` and `close` different values can be used - for opening and closing fences. - This can be used too if fences contain different characters or lengths other - than 3 -* `anywhere` (`boolean`, default: `false`) - – if `true`, matter can be found anywhere in the document. - If `false` (default), only matter at the start of the document is recognized - -###### Example - -For `{type: 'yaml', marker: '-'}`: - -```yaml ---- -key: value ---- -``` - -Yields: - -```json -{ - "type": "yaml", - "value": "key: value" -} -``` - -For `{type: 'custom', marker: {open: '<', close: '>'}}`: - -```text -<<< -data ->>> -``` - -Yields: - -```json -{ - "type": "custom", - "value": "data" -} -``` - -For `{type: 'custom', fence: '+=+=+=+'}`: - -```text -+=+=+=+ -data -+=+=+=+ -``` - -Yields: - -```json -{ - "type": "custom", - "value": "data" -} -``` - -For `{type: 'json', fence: {open: '{', close: '}'}}`: - -```json -{ - "key": "value" -} -``` - -Yields: - -```json -{ - "type": "json", - "value": "\"key\": \"value\"" -} -``` +See [`micromark-extension-frontmatter`][options] for a description of `options`. ## Security @@ -203,10 +107,14 @@ Use of `remark-frontmatter` does not involve [**rehype**][rehype] ## Related +* [`remark-gfm`](https://github.com/remarkjs/remark-gfm) + — GitHub Flavored Markdown +* [`remark-footnotes`](https://github.com/remarkjs/remark-footnotes) + — Footnotes +* [`remark-math`](https://github.com/remarkjs/remark-math) + — Math * [`remark-github`](https://github.com/remarkjs/remark-github) — Auto-link references like in GitHub issues, PRs, and comments -* [`remark-math`](https://github.com/rokt33r/remark-math) - — Math support * [`remark-yaml-config`](https://github.com/remarkjs/remark-yaml-config) — Configure remark from YAML configuration @@ -226,7 +134,7 @@ abide by its terms. <!-- Definitions --> -[build-badge]: https://img.shields.io/travis/remarkjs/remark-frontmatter/master.svg +[build-badge]: https://img.shields.io/travis/remarkjs/remark-frontmatter/main.svg [build]: https://travis-ci.org/remarkjs/remark-frontmatter @@ -248,19 +156,19 @@ abide by its terms. [collective]: https://opencollective.com/unified -[chat-badge]: https://img.shields.io/badge/chat-spectrum-7b16ff.svg +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg -[chat]: https://spectrum.chat/unified/remark +[chat]: https://github.com/remarkjs/remark/discussions [npm]: https://docs.npmjs.com/cli/install [health]: https://github.com/remarkjs/.github -[contributing]: https://github.com/remarkjs/.github/blob/master/contributing.md +[contributing]: https://github.com/remarkjs/.github/blob/HEAD/contributing.md -[support]: https://github.com/remarkjs/.github/blob/master/support.md +[support]: https://github.com/remarkjs/.github/blob/HEAD/support.md -[coc]: https://github.com/remarkjs/.github/blob/master/code-of-conduct.md +[coc]: https://github.com/remarkjs/.github/blob/HEAD/code-of-conduct.md [license]: license @@ -268,26 +176,10 @@ abide by its terms. 
[remark]: https://github.com/remarkjs/remark -[parse]: https://github.com/remarkjs/remark/tree/master/packages/remark-parse - -[tokenizers]: https://github.com/remarkjs/remark/tree/master/packages/remark-parse#parserblocktokenizers - -[stringify]: https://github.com/remarkjs/remark/tree/master/packages/remark-stringify - -[visitors]: https://github.com/remarkjs/remark/tree/master/packages/remark-stringify#compilervisitors - -[processor]: https://github.com/unifiedjs/unified#processor - -[mdast]: https://github.com/syntax-tree/mdast - -[man]: https://github.com/remarkjs/remark-man - -[preset]: #preset - -[matter]: #matter - [xss]: https://en.wikipedia.org/wiki/Cross-site_scripting [rehype]: https://github.com/rehypejs/rehype [hast]: https://github.com/syntax-tree/hast + +[options]: https://github.com/micromark/micromark-extension-frontmatter#options diff --git a/node_modules/remark-frontmatter/types/index.d.ts b/node_modules/remark-frontmatter/types/index.d.ts new file mode 100644 index 00000000..a256daf1 --- /dev/null +++ b/node_modules/remark-frontmatter/types/index.d.ts @@ -0,0 +1,63 @@ +// Minimum TypeScript Version: 3.2 +import {Plugin} from 'unified' +import {Node} from 'unist' + +declare namespace remarkFrontmatter { + type Frontmatter = Plugin<[RemarkFrontmatterOptions?]> + + type Preset = 'yaml' | 'toml' + + interface Fence { + open: string + close: string + } + + interface Matter { + /** + * Node type to parse to in [mdast](https://github.com/syntax-tree/mdast) and compile from. + */ + type: string + + /** + * Character used to construct fences. + * By providing an object with `open` and `close`. + * different characters can be used for opening and closing fences. + * For example the character `'-'` will result in `'---'` being used as the + * fence. + */ + marker?: string | Fence + + /** + * String used as the complete fence. + * By providing an object with `open` and `close` different values can be used + * for opening and closing fences. + * This can be used too if fences contain different characters or lengths other + * than 3 + */ + fence?: string | Fence + + /** + * If `true`, matter can be found anywhere in the document. + * If `false` (default), only matter at the start of the document is recognized + * + * @default false + */ + anywhere?: boolean + } + + type RemarkFrontmatterOptions = Array<Preset | Matter> + + interface YamlNode extends Node { + type: 'yaml' + value: string + } + + interface TomlNode extends Node { + type: 'toml' + value: string + } +} + +declare const remarkFrontmatter: remarkFrontmatter.Frontmatter + +export = remarkFrontmatter diff --git a/node_modules/remark-gfm/index.js b/node_modules/remark-gfm/index.js new file mode 100644 index 00000000..1f9d36b8 --- /dev/null +++ b/node_modules/remark-gfm/index.js @@ -0,0 +1,39 @@ +'use strict' + +var syntax = require('micromark-extension-gfm') +var fromMarkdown = require('mdast-util-gfm/from-markdown') +var toMarkdown = require('mdast-util-gfm/to-markdown') + +var warningIssued + +module.exports = gfm + +function gfm(options) { + var data = this.data() + + /* istanbul ignore next - old remark. 
*/ + if ( + !warningIssued && + ((this.Parser && + this.Parser.prototype && + this.Parser.prototype.blockTokenizers) || + (this.Compiler && + this.Compiler.prototype && + this.Compiler.prototype.visitors)) + ) { + warningIssued = true + console.warn( + '[remark-gfm] Warning: please upgrade to remark 13 to use this plugin' + ) + } + + add('micromarkExtensions', syntax(options)) + add('fromMarkdownExtensions', fromMarkdown) + add('toMarkdownExtensions', toMarkdown(options)) + + function add(field, value) { + /* istanbul ignore if - other extensions. */ + if (data[field]) data[field].push(value) + else data[field] = [value] + } +} diff --git a/node_modules/remark-gfm/license b/node_modules/remark-gfm/license new file mode 100644 index 00000000..39372356 --- /dev/null +++ b/node_modules/remark-gfm/license @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2020 Titus Wormer <tituswormer@gmail.com> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/remark-gfm/package.json b/node_modules/remark-gfm/package.json new file mode 100644 index 00000000..ed9245f8 --- /dev/null +++ b/node_modules/remark-gfm/package.json @@ -0,0 +1,88 @@ +{ + "name": "remark-gfm", + "version": "1.0.0", + "description": "remark plugin to support GFM (autolink literals, strikethrough, tables, tasklists)", + "license": "MIT", + "keywords": [ + "unified", + "remark", + "remark-plugin", + "plugin", + "mdast", + "markdown", + "table", + "strikethrough", + "tasklist", + "autolink", + "github", + "gfm" + ], + "repository": "remarkjs/remark-gfm", + "bugs": "https://github.com/remarkjs/remark-gfm/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", + "contributors": [ + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" + ], + "types": "types/index.d.ts", + "files": [ + "types/index.d.ts", + "index.js" + ], + "dependencies": { + "mdast-util-gfm": "^0.1.0", + "micromark-extension-gfm": "^0.3.0" + }, + "devDependencies": { + "dtslint": "^4.0.0", + "is-hidden": "^1.0.0", + "not": "^0.1.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark": "^13.0.0-alpha.1", + "remark-cli": "^8.0.0", + "remark-preset-wooorm": "^7.0.0", + "string-width": "^4.2.0", + "tape": "^5.0.0", + "to-vfile": "^6.0.0", + "unified": "^9.0.0", + "xo": "^0.33.0" + }, + "scripts": { + "format": "remark . -qfo --ignore-pattern test/ && prettier . 
--write && xo --fix", + "test-api": "node test", + "test-coverage": "nyc --reporter lcov tape test/index.js", + "test-types": "dtslint types", + "test": "npm run format && npm run test-coverage && npm run test-types" + }, + "nyc": { + "check-coverage": true, + "lines": 100, + "functions": 100, + "branches": 100 + }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, + "xo": { + "prettier": true, + "esnext": false, + "rules": { + "unicorn/no-fn-reference-in-iterator": "off", + "unicorn/prefer-optional-catch-binding": "off" + } + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] + } +} diff --git a/node_modules/remark-gfm/readme.md b/node_modules/remark-gfm/readme.md new file mode 100644 index 00000000..e40c39ce --- /dev/null +++ b/node_modules/remark-gfm/readme.md @@ -0,0 +1,230 @@ +# remark-gfm + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +[**remark**][remark] plugin to support [GitHub Flavored Markdown][gfm]. + +## Important! + +This plugin is made for the new parser in remark +([`micromark`](https://github.com/micromark/micromark), +see [`remarkjs/remark#536`](https://github.com/remarkjs/remark/pull/536)). +While you’re still on remark 12, use the `gfm` option for remark. +Use this plugin for remark 13+. + +## Install + +[npm][]: + +```sh +npm install remark-gfm +``` + +## Use + +Say we have the following file, `example.md`: + +```markdown +# GFM + +## Autolink literals + +www.example.com, https://example.com, and contact@example.com. + +## Strikethrough + +~one~ or ~~two~~ tildes. + +## Table + +| a | b | c | d | +| - | :- | -: | :-: | + +## Tasklist + +* [ ] to do +* [x] done +``` + +And our script, `example.js`, looks as follows: + +```js +var vfile = require('to-vfile') +var report = require('vfile-reporter') +var unified = require('unified') +var parse = require('remark-parse') +var gfm = require('remark-gfm') +var remark2rehype = require('remark-rehype') +var stringify = require('rehype-stringify') + +unified() + .use(parse) + .use(gfm) + .use(remark2rehype) + .use(stringify) + .process(vfile.readSync('example.md'), function (err, file) { + console.error(report(err || file)) + console.log(String(file)) + }) +``` + +Now, running `node example` yields: + +```html +example.md: no issues found +<h1>GFM</h1> +<h2>Autolink literals</h2> +<p><a href="http://www.example.com">www.example.com</a>, <a href="https://example.com">https://example.com</a>, and <a href="mailto:contact@example.com">contact@example.com</a>.</p> +<h2>Strikethrough</h2> +<p><del>one</del> or <del>two</del> tildes.</p> +<h2>Table</h2> +<table> +<thead> +<tr> +<th>a</th> +<th align="left">b</th> +<th align="right">c</th> +<th align="center">d</th> +</tr> +</thead> +</table> +<h2>Tasklist</h2> +<ul class="contains-task-list"> +<li class="task-list-item"><input type="checkbox" disabled> to do</li> +<li class="task-list-item"><input type="checkbox" checked disabled> done</li> +</ul> +``` + +## API + +### `remark().use(gfm[, options])` + +Configures remark so that it can parse and serialize GFM (autolink literals, +strikethrough, tables, tasklists). + +##### `options` + +###### `options.singleTilde` + +Whether to support strikethrough with a single tilde (`boolean`, default: +`true`). 
+Single tildes work on github.com, but are technically prohibited by the GFM +spec. +Passed as `singleTilde` to +[`micromark-extension-gfm-strikethrough`][strikethrough]. + +###### `options.tableCellPadding` + +Create tables with a space between cell delimiters (`|`) and content (`boolean`, +default: `true`). +Passed to [`mdast-util-gfm-table`][table]. + +###### `options.tablePipeAlign` + +Align the delimiters (`|`) between table cells so that they all align nicely and +form a grid (`boolean`, default: `true`). +Passed to [`mdast-util-gfm-table`][table]. + +###### `options.stringLength` + +Function passed to [`markdown-table`][markdown-table] to detect the length of a +table cell (`Function`, default: [`s => s.length`][string-length]). +Used to align table cells. +Passed to [`mdast-util-gfm-table`][table]. + +## Security + +Use of `remark-gfm` does not involve [**rehype**][rehype] ([**hast**][hast]) or +user content so there are no openings for [cross-site scripting (XSS)][xss] +attacks. + +## Related + +* [`remark-github`](https://github.com/remarkjs/remark-github) + — Autolink references like in GitHub issues, PRs, and comments +* [`remark-footnotes`](https://github.com/remarkjs/remark-footnotes) + — Footnotes +* [`remark-frontmatter`](https://github.com/remarkjs/remark-frontmatter) + — Frontmatter (YAML, TOML, and more) +* [`remark-math`](https://github.com/rokt33r/remark-math) + — Math + +## Contribute + +See [`contributing.md`][contributing] in [`remarkjs/.github`][health] for ways +to get started. +See [`support.md`][support] for ways to get help. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## License + +[MIT][license] © [Titus Wormer][author] + +<!-- Definitions --> + +[build-badge]: https://img.shields.io/travis/remarkjs/remark-gfm/main.svg + +[build]: https://travis-ci.org/remarkjs/remark-gfm + +[coverage-badge]: https://img.shields.io/codecov/c/github/remarkjs/remark-gfm.svg + +[coverage]: https://codecov.io/github/remarkjs/remark-gfm + +[downloads-badge]: https://img.shields.io/npm/dm/remark-gfm.svg + +[downloads]: https://www.npmjs.com/package/remark-gfm + +[size-badge]: https://img.shields.io/bundlephobia/minzip/remark-gfm.svg + +[size]: https://bundlephobia.com/result?p=remark-gfm + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/remarkjs/remark/discussions + +[npm]: https://docs.npmjs.com/cli/install + +[health]: https://github.com/remarkjs/.github + +[contributing]: https://github.com/remarkjs/.github/blob/HEAD/contributing.md + +[support]: https://github.com/remarkjs/.github/blob/HEAD/support.md + +[coc]: https://github.com/remarkjs/.github/blob/HEAD/code-of-conduct.md + +[license]: license + +[author]: https://wooorm.com + +[remark]: https://github.com/remarkjs/remark + +[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting + +[rehype]: https://github.com/rehypejs/rehype + +[hast]: https://github.com/syntax-tree/hast + +[gfm]: https://github.github.com/gfm/ + +[table]: https://github.com/syntax-tree/mdast-util-gfm-table#api + +[markdown-table]: https://github.com/wooorm/markdown-table + +[string-length]: https://github.com/wooorm/markdown-table#optionsstringlength + +[strikethrough]: 
https://github.com/micromark/micromark-extension-gfm-strikethrough#api diff --git a/node_modules/remark-gfm/types/index.d.ts b/node_modules/remark-gfm/types/index.d.ts new file mode 100644 index 00000000..a37d302e --- /dev/null +++ b/node_modules/remark-gfm/types/index.d.ts @@ -0,0 +1,39 @@ +// TypeScript Version: 3.4 + +import {Plugin} from 'unified' + +declare namespace remarkGfm { + type Gfm = Plugin<[RemarkGfmOptions?]> + + interface RemarkGfmOptions { + /** + * Whether to support `~single tilde~` strikethrough. + * + * @defaultValue true + */ + singleTilde?: boolean + /** + * Create tables with a space between cell delimiters (`|`) and content. + * + * @defaultValue true + */ + tableCellPadding?: boolean + /** + * Align the delimiters (`|`) between table cells so that they all align + * nicely and form a grid. + * + * @defaultValue true + */ + tablePipeAlign?: boolean + /** + * Function to detect the length of a table cell. Used to align tables. + * + * @defaultValue s => s.length + */ + stringLength?: (s: string) => number + } +} + +declare const remarkGfm: remarkGfm.Gfm + +export = remarkGfm diff --git a/node_modules/remark-parse/index.js b/node_modules/remark-parse/index.js index 1579e355..9719b797 100644 --- a/node_modules/remark-parse/index.js +++ b/node_modules/remark-parse/index.js @@ -1,14 +1,24 @@ -'use strict'; +'use strict' -var unherit = require('unherit'); -var xtend = require('xtend'); -var Parser = require('./lib/parser.js'); +module.exports = parse -module.exports = parse; -parse.Parser = Parser; +var fromMarkdown = require('mdast-util-from-markdown') function parse(options) { - var Local = unherit(Parser); - Local.prototype.options = xtend(Local.prototype.options, this.data('settings'), options); - this.Parser = Local; + var self = this + + this.Parser = parse + + function parse(doc) { + return fromMarkdown( + doc, + Object.assign({}, self.data('settings'), options, { + // Note: these options are not in the readme. + // The goal is for them to be set by plugins on `data` instead of being + // passed by users. + extensions: self.data('micromarkExtensions') || [], + mdastExtensions: self.data('fromMarkdownExtensions') || [] + }) + ) + } } diff --git a/node_modules/remark-parse/lib/block-elements.json b/node_modules/remark-parse/lib/block-elements.json deleted file mode 100644 index 2d13b561..00000000 --- a/node_modules/remark-parse/lib/block-elements.json +++ /dev/null @@ -1,68 +0,0 @@ -[ - "address", - "article", - "aside", - "base", - "basefont", - "blockquote", - "body", - "caption", - "center", - "col", - "colgroup", - "dd", - "details", - "dialog", - "dir", - "div", - "dl", - "dt", - "fieldset", - "figcaption", - "figure", - "footer", - "form", - "frame", - "frameset", - "h1", - "h2", - "h3", - "h4", - "h5", - "h6", - "head", - "header", - "hgroup", - "hr", - "html", - "iframe", - "legend", - "li", - "link", - "main", - "menu", - "menuitem", - "meta", - "nav", - "noframes", - "ol", - "optgroup", - "option", - "p", - "param", - "pre", - "section", - "source", - "title", - "summary", - "table", - "tbody", - "td", - "tfoot", - "th", - "thead", - "title", - "tr", - "track", - "ul" -] diff --git a/node_modules/remark-parse/lib/decode.js b/node_modules/remark-parse/lib/decode.js deleted file mode 100644 index fd45b729..00000000 --- a/node_modules/remark-parse/lib/decode.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -var xtend = require('xtend'); -var entities = require('parse-entities'); - -module.exports = factory; - -/* Factory to create an entity decoder. 
*/ -function factory(ctx) { - decoder.raw = decodeRaw; - - return decoder; - - /* Normalize `position` to add an `indent`. */ - function normalize(position) { - var offsets = ctx.offset; - var line = position.line; - var result = []; - - while (++line) { - if (!(line in offsets)) { - break; - } - - result.push((offsets[line] || 0) + 1); - } - - return { - start: position, - indent: result - }; - } - - /* Handle a warning. - * See https://github.com/wooorm/parse-entities - * for the warnings. */ - function handleWarning(reason, position, code) { - if (code === 3) { - return; - } - - ctx.file.message(reason, position); - } - - /* Decode `value` (at `position`) into text-nodes. */ - function decoder(value, position, handler) { - entities(value, { - position: normalize(position), - warning: handleWarning, - text: handler, - reference: handler, - textContext: ctx, - referenceContext: ctx - }); - } - - /* Decode `value` (at `position`) into a string. */ - function decodeRaw(value, position, options) { - return entities(value, xtend(options, { - position: normalize(position), - warning: handleWarning - })); - } -} diff --git a/node_modules/remark-parse/lib/defaults.js b/node_modules/remark-parse/lib/defaults.js deleted file mode 100644 index 37846f39..00000000 --- a/node_modules/remark-parse/lib/defaults.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict'; - -module.exports = { - position: true, - gfm: true, - commonmark: false, - footnotes: false, - pedantic: false, - blocks: require('./block-elements.json') -}; diff --git a/node_modules/remark-parse/lib/locate/break.js b/node_modules/remark-parse/lib/locate/break.js deleted file mode 100644 index 295bdc98..00000000 --- a/node_modules/remark-parse/lib/locate/break.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - var index = value.indexOf('\n', fromIndex); - - while (index > fromIndex) { - if (value.charAt(index - 1) !== ' ') { - break; - } - - index--; - } - - return index; -} diff --git a/node_modules/remark-parse/lib/locate/code-inline.js b/node_modules/remark-parse/lib/locate/code-inline.js deleted file mode 100644 index 981c8169..00000000 --- a/node_modules/remark-parse/lib/locate/code-inline.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - return value.indexOf('`', fromIndex); -} diff --git a/node_modules/remark-parse/lib/locate/delete.js b/node_modules/remark-parse/lib/locate/delete.js deleted file mode 100644 index d208aef2..00000000 --- a/node_modules/remark-parse/lib/locate/delete.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - return value.indexOf('~~', fromIndex); -} diff --git a/node_modules/remark-parse/lib/locate/emphasis.js b/node_modules/remark-parse/lib/locate/emphasis.js deleted file mode 100644 index 6a1f2422..00000000 --- a/node_modules/remark-parse/lib/locate/emphasis.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - var asterisk = value.indexOf('*', fromIndex); - var underscore = value.indexOf('_', fromIndex); - - if (underscore === -1) { - return asterisk; - } - - if (asterisk === -1) { - return underscore; - } - - return underscore < asterisk ? 
underscore : asterisk; -} diff --git a/node_modules/remark-parse/lib/locate/escape.js b/node_modules/remark-parse/lib/locate/escape.js deleted file mode 100644 index f6c63715..00000000 --- a/node_modules/remark-parse/lib/locate/escape.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - return value.indexOf('\\', fromIndex); -} diff --git a/node_modules/remark-parse/lib/locate/link.js b/node_modules/remark-parse/lib/locate/link.js deleted file mode 100644 index 0f16fd80..00000000 --- a/node_modules/remark-parse/lib/locate/link.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - var link = value.indexOf('[', fromIndex); - var image = value.indexOf('![', fromIndex); - - if (image === -1) { - return link; - } - - /* Link can never be `-1` if an image is found, so we don’t need - * to check for that :) */ - return link < image ? link : image; -} diff --git a/node_modules/remark-parse/lib/locate/strong.js b/node_modules/remark-parse/lib/locate/strong.js deleted file mode 100644 index da1cac0a..00000000 --- a/node_modules/remark-parse/lib/locate/strong.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - var asterisk = value.indexOf('**', fromIndex); - var underscore = value.indexOf('__', fromIndex); - - if (underscore === -1) { - return asterisk; - } - - if (asterisk === -1) { - return underscore; - } - - return underscore < asterisk ? underscore : asterisk; -} diff --git a/node_modules/remark-parse/lib/locate/tag.js b/node_modules/remark-parse/lib/locate/tag.js deleted file mode 100644 index 3c553426..00000000 --- a/node_modules/remark-parse/lib/locate/tag.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; - -module.exports = locate; - -function locate(value, fromIndex) { - return value.indexOf('<', fromIndex); -} diff --git a/node_modules/remark-parse/lib/locate/url.js b/node_modules/remark-parse/lib/locate/url.js deleted file mode 100644 index 59b63e25..00000000 --- a/node_modules/remark-parse/lib/locate/url.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict'; - -module.exports = locate; - -var PROTOCOLS = ['https://', 'http://', 'mailto:']; - -function locate(value, fromIndex) { - var length = PROTOCOLS.length; - var index = -1; - var min = -1; - var position; - - if (!this.options.gfm) { - return -1; - } - - while (++index < length) { - position = value.indexOf(PROTOCOLS[index], fromIndex); - - if (position !== -1 && (position < min || min === -1)) { - min = position; - } - } - - return min; -} diff --git a/node_modules/remark-parse/lib/parse.js b/node_modules/remark-parse/lib/parse.js deleted file mode 100644 index 5a8d8119..00000000 --- a/node_modules/remark-parse/lib/parse.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -var xtend = require('xtend'); -var removePosition = require('unist-util-remove-position'); - -module.exports = parse; - -var C_NEWLINE = '\n'; -var EXPRESSION_LINE_BREAKS = /\r\n|\r/g; - -/* Parse the bound file. */ -function parse() { - var self = this; - var value = String(self.file); - var start = {line: 1, column: 1, offset: 0}; - var content = xtend(start); - var node; - - /* Clean non-unix newlines: `\r\n` and `\r` are all - * changed to `\n`. This should not affect positional - * information. 
*/ - value = value.replace(EXPRESSION_LINE_BREAKS, C_NEWLINE); - - if (value.charCodeAt(0) === 0xFEFF) { - value = value.slice(1); - - content.column++; - content.offset++; - } - - node = { - type: 'root', - children: self.tokenizeBlock(value, content), - position: { - start: start, - end: self.eof || xtend(start) - } - }; - - if (!self.options.position) { - removePosition(node, true); - } - - return node; -} diff --git a/node_modules/remark-parse/lib/parser.js b/node_modules/remark-parse/lib/parser.js deleted file mode 100644 index 9291109f..00000000 --- a/node_modules/remark-parse/lib/parser.js +++ /dev/null @@ -1,152 +0,0 @@ -'use strict'; - -var xtend = require('xtend'); -var toggle = require('state-toggle'); -var vfileLocation = require('vfile-location'); -var unescape = require('./unescape'); -var decode = require('./decode'); -var tokenizer = require('./tokenizer'); - -module.exports = Parser; - -function Parser(doc, file) { - this.file = file; - this.offset = {}; - this.options = xtend(this.options); - this.setOptions({}); - - this.inList = false; - this.inBlock = false; - this.inLink = false; - this.atStart = true; - - this.toOffset = vfileLocation(file).toOffset; - this.unescape = unescape(this, 'escape'); - this.decode = decode(this); -} - -var proto = Parser.prototype; - -/* Expose core. */ -proto.setOptions = require('./set-options'); -proto.parse = require('./parse'); - -/* Expose `defaults`. */ -proto.options = require('./defaults'); - -/* Enter and exit helpers. */ -proto.exitStart = toggle('atStart', true); -proto.enterList = toggle('inList', false); -proto.enterLink = toggle('inLink', false); -proto.enterBlock = toggle('inBlock', false); - -/* Nodes that can interupt a paragraph: - * - * ```markdown - * A paragraph, followed by a thematic break. - * ___ - * ``` - * - * In the above example, the thematic break “interupts” - * the paragraph. */ -proto.interruptParagraph = [ - ['thematicBreak'], - ['atxHeading'], - ['fencedCode'], - ['blockquote'], - ['html'], - ['setextHeading', {commonmark: false}], - ['definition', {commonmark: false}], - ['footnote', {commonmark: false}] -]; - -/* Nodes that can interupt a list: - * - * ```markdown - * - One - * ___ - * ``` - * - * In the above example, the thematic break “interupts” - * the list. */ -proto.interruptList = [ - ['atxHeading', {pedantic: false}], - ['fencedCode', {pedantic: false}], - ['thematicBreak', {pedantic: false}], - ['definition', {commonmark: false}], - ['footnote', {commonmark: false}] -]; - -/* Nodes that can interupt a blockquote: - * - * ```markdown - * > A paragraph. - * ___ - * ``` - * - * In the above example, the thematic break “interupts” - * the blockquote. */ -proto.interruptBlockquote = [ - ['indentedCode', {commonmark: true}], - ['fencedCode', {commonmark: true}], - ['atxHeading', {commonmark: true}], - ['setextHeading', {commonmark: true}], - ['thematicBreak', {commonmark: true}], - ['html', {commonmark: true}], - ['list', {commonmark: true}], - ['definition', {commonmark: false}], - ['footnote', {commonmark: false}] -]; - -/* Handlers. 
*/ -proto.blockTokenizers = { - newline: require('./tokenize/newline'), - indentedCode: require('./tokenize/code-indented'), - fencedCode: require('./tokenize/code-fenced'), - blockquote: require('./tokenize/blockquote'), - atxHeading: require('./tokenize/heading-atx'), - thematicBreak: require('./tokenize/thematic-break'), - list: require('./tokenize/list'), - setextHeading: require('./tokenize/heading-setext'), - html: require('./tokenize/html-block'), - footnote: require('./tokenize/footnote-definition'), - definition: require('./tokenize/definition'), - table: require('./tokenize/table'), - paragraph: require('./tokenize/paragraph') -}; - -proto.inlineTokenizers = { - escape: require('./tokenize/escape'), - autoLink: require('./tokenize/auto-link'), - url: require('./tokenize/url'), - html: require('./tokenize/html-inline'), - link: require('./tokenize/link'), - reference: require('./tokenize/reference'), - strong: require('./tokenize/strong'), - emphasis: require('./tokenize/emphasis'), - deletion: require('./tokenize/delete'), - code: require('./tokenize/code-inline'), - break: require('./tokenize/break'), - text: require('./tokenize/text') -}; - -/* Expose precedence. */ -proto.blockMethods = keys(proto.blockTokenizers); -proto.inlineMethods = keys(proto.inlineTokenizers); - -/* Tokenizers. */ -proto.tokenizeBlock = tokenizer('block'); -proto.tokenizeInline = tokenizer('inline'); -proto.tokenizeFactory = tokenizer; - -/* Get all keys in `value`. */ -function keys(value) { - var result = []; - var key; - - for (key in value) { - result.push(key); - } - - return result; -} diff --git a/node_modules/remark-parse/lib/set-options.js b/node_modules/remark-parse/lib/set-options.js deleted file mode 100644 index c55f7f32..00000000 --- a/node_modules/remark-parse/lib/set-options.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -var xtend = require('xtend'); -var escapes = require('markdown-escapes'); -var defaults = require('./defaults'); - -module.exports = setOptions; - -function setOptions(options) { - var self = this; - var current = self.options; - var key; - var value; - - if (options == null) { - options = {}; - } else if (typeof options === 'object') { - options = xtend(options); - } else { - throw new Error( - 'Invalid value `' + options + '` ' + - 'for setting `options`' - ); - } - - for (key in defaults) { - value = options[key]; - - if (value == null) { - value = current[key]; - } - - if ( - (key !== 'blocks' && typeof value !== 'boolean') || - (key === 'blocks' && typeof value !== 'object') - ) { - throw new Error('Invalid value `' + value + '` for setting `options.' + key + '`'); - } - - options[key] = value; - } - - self.options = options; - self.escape = escapes(options); - - return self; -} diff --git a/node_modules/remark-parse/lib/tokenize/auto-link.js b/node_modules/remark-parse/lib/tokenize/auto-link.js deleted file mode 100644 index c945a2c1..00000000 --- a/node_modules/remark-parse/lib/tokenize/auto-link.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); -var decode = require('parse-entities'); -var locate = require('../locate/tag'); - -module.exports = autoLink; -autoLink.locator = locate; -autoLink.notInLink = true; - -var C_LT = '<'; -var C_GT = '>'; -var C_AT_SIGN = '@'; -var C_SLASH = '/'; -var MAILTO = 'mailto:'; -var MAILTO_LENGTH = MAILTO.length; - -/* Tokenise a link. 
*/ -function autoLink(eat, value, silent) { - var self; - var subvalue; - var length; - var index; - var queue; - var character; - var hasAtCharacter; - var link; - var now; - var content; - var tokenizers; - var exit; - - if (value.charAt(0) !== C_LT) { - return; - } - - self = this; - subvalue = ''; - length = value.length; - index = 0; - queue = ''; - hasAtCharacter = false; - link = ''; - - index++; - subvalue = C_LT; - - while (index < length) { - character = value.charAt(index); - - if ( - whitespace(character) || - character === C_GT || - character === C_AT_SIGN || - (character === ':' && value.charAt(index + 1) === C_SLASH) - ) { - break; - } - - queue += character; - index++; - } - - if (!queue) { - return; - } - - link += queue; - queue = ''; - - character = value.charAt(index); - link += character; - index++; - - if (character === C_AT_SIGN) { - hasAtCharacter = true; - } else { - if ( - character !== ':' || - value.charAt(index + 1) !== C_SLASH - ) { - return; - } - - link += C_SLASH; - index++; - } - - while (index < length) { - character = value.charAt(index); - - if (whitespace(character) || character === C_GT) { - break; - } - - queue += character; - index++; - } - - character = value.charAt(index); - - if (!queue || character !== C_GT) { - return; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - link += queue; - content = link; - subvalue += link + character; - now = eat.now(); - now.column++; - now.offset++; - - if (hasAtCharacter) { - if (link.slice(0, MAILTO_LENGTH).toLowerCase() === MAILTO) { - content = content.substr(MAILTO_LENGTH); - now.column += MAILTO_LENGTH; - now.offset += MAILTO_LENGTH; - } else { - link = MAILTO + link; - } - } - - /* Temporarily remove all tokenizers except text in autolinks. */ - tokenizers = self.inlineTokenizers; - self.inlineTokenizers = {text: tokenizers.text}; - - exit = self.enterLink(); - - content = self.tokenizeInline(content, now); - - self.inlineTokenizers = tokenizers; - exit(); - - return eat(subvalue)({ - type: 'link', - title: null, - url: decode(link, {nonTerminated: false}), - children: content - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/blockquote.js b/node_modules/remark-parse/lib/tokenize/blockquote.js deleted file mode 100644 index bd700d6a..00000000 --- a/node_modules/remark-parse/lib/tokenize/blockquote.js +++ /dev/null @@ -1,129 +0,0 @@ -'use strict'; - -var trim = require('trim'); -var interrupt = require('../util/interrupt'); - -module.exports = blockquote; - -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_GT = '>'; - -/* Tokenise a blockquote. 
*/ -function blockquote(eat, value, silent) { - var self = this; - var offsets = self.offset; - var tokenizers = self.blockTokenizers; - var interruptors = self.interruptBlockquote; - var now = eat.now(); - var currentLine = now.line; - var length = value.length; - var values = []; - var contents = []; - var indents = []; - var add; - var index = 0; - var character; - var rest; - var nextIndex; - var content; - var line; - var startIndex; - var prefixed; - var exit; - - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - break; - } - - index++; - } - - if (value.charAt(index) !== C_GT) { - return; - } - - if (silent) { - return true; - } - - index = 0; - - while (index < length) { - nextIndex = value.indexOf(C_NEWLINE, index); - startIndex = index; - prefixed = false; - - if (nextIndex === -1) { - nextIndex = length; - } - - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - break; - } - - index++; - } - - if (value.charAt(index) === C_GT) { - index++; - prefixed = true; - - if (value.charAt(index) === C_SPACE) { - index++; - } - } else { - index = startIndex; - } - - content = value.slice(index, nextIndex); - - if (!prefixed && !trim(content)) { - index = startIndex; - break; - } - - if (!prefixed) { - rest = value.slice(index); - - /* Check if the following code contains a possible - * block. */ - if (interrupt(interruptors, tokenizers, self, [eat, rest, true])) { - break; - } - } - - line = startIndex === index ? content : value.slice(startIndex, nextIndex); - - indents.push(index - startIndex); - values.push(line); - contents.push(content); - - index = nextIndex + 1; - } - - index = -1; - length = indents.length; - add = eat(values.join(C_NEWLINE)); - - while (++index < length) { - offsets[currentLine] = (offsets[currentLine] || 0) + indents[index]; - currentLine++; - } - - exit = self.enterBlock(); - contents = self.tokenizeBlock(contents.join(C_NEWLINE), now); - exit(); - - return add({ - type: 'blockquote', - children: contents - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/break.js b/node_modules/remark-parse/lib/tokenize/break.js deleted file mode 100644 index eb531342..00000000 --- a/node_modules/remark-parse/lib/tokenize/break.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict'; - -var locate = require('../locate/break'); - -module.exports = hardBreak; -hardBreak.locator = locate; - -var MIN_BREAK_LENGTH = 2; - -function hardBreak(eat, value, silent) { - var length = value.length; - var index = -1; - var queue = ''; - var character; - - while (++index < length) { - character = value.charAt(index); - - if (character === '\n') { - if (index < MIN_BREAK_LENGTH) { - return; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - queue += character; - - return eat(queue)({type: 'break'}); - } - - if (character !== ' ') { - return; - } - - queue += character; - } -} diff --git a/node_modules/remark-parse/lib/tokenize/code-fenced.js b/node_modules/remark-parse/lib/tokenize/code-fenced.js deleted file mode 100644 index 65f2bc73..00000000 --- a/node_modules/remark-parse/lib/tokenize/code-fenced.js +++ /dev/null @@ -1,236 +0,0 @@ -'use strict'; - -var trim = require('trim-trailing-lines'); - -module.exports = fencedCode; - -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_TILDE = '~'; -var C_TICK = '`'; - -var MIN_FENCE_COUNT = 3; -var CODE_INDENT_COUNT = 4; - -function fencedCode(eat, value, 
silent) { - var self = this; - var settings = self.options; - var length = value.length + 1; - var index = 0; - var subvalue = ''; - var fenceCount; - var marker; - var character; - var flag; - var queue; - var content; - var exdentedContent; - var closing; - var exdentedClosing; - var indent; - var now; - - if (!settings.gfm) { - return; - } - - /* Eat initial spacing. */ - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - break; - } - - subvalue += character; - index++; - } - - indent = index; - - /* Eat the fence. */ - character = value.charAt(index); - - if (character !== C_TILDE && character !== C_TICK) { - return; - } - - index++; - marker = character; - fenceCount = 1; - subvalue += character; - - while (index < length) { - character = value.charAt(index); - - if (character !== marker) { - break; - } - - subvalue += character; - fenceCount++; - index++; - } - - if (fenceCount < MIN_FENCE_COUNT) { - return; - } - - /* Eat spacing before flag. */ - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - break; - } - - subvalue += character; - index++; - } - - /* Eat flag. */ - flag = ''; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if ( - character === C_NEWLINE || - character === C_TILDE || - character === C_TICK - ) { - break; - } - - if (character === C_SPACE || character === C_TAB) { - queue += character; - } else { - flag += queue + character; - queue = ''; - } - - index++; - } - - character = value.charAt(index); - - if (character && character !== C_NEWLINE) { - return; - } - - if (silent) { - return true; - } - - now = eat.now(); - now.column += subvalue.length; - now.offset += subvalue.length; - - subvalue += flag; - flag = self.decode.raw(self.unescape(flag), now); - - if (queue) { - subvalue += queue; - } - - queue = ''; - closing = ''; - exdentedClosing = ''; - content = ''; - exdentedContent = ''; - - /* Eat content. */ - while (index < length) { - character = value.charAt(index); - content += closing; - exdentedContent += exdentedClosing; - closing = ''; - exdentedClosing = ''; - - if (character !== C_NEWLINE) { - content += character; - exdentedClosing += character; - index++; - continue; - } - - /* Add the newline to `subvalue` if its the first - * character. Otherwise, add it to the `closing` - * queue. 
*/ - if (content) { - closing += character; - exdentedClosing += character; - } else { - subvalue += character; - } - - queue = ''; - index++; - - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE) { - break; - } - - queue += character; - index++; - } - - closing += queue; - exdentedClosing += queue.slice(indent); - - if (queue.length >= CODE_INDENT_COUNT) { - continue; - } - - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character !== marker) { - break; - } - - queue += character; - index++; - } - - closing += queue; - exdentedClosing += queue; - - if (queue.length < fenceCount) { - continue; - } - - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - break; - } - - closing += character; - exdentedClosing += character; - index++; - } - - if (!character || character === C_NEWLINE) { - break; - } - } - - subvalue += content + closing; - - return eat(subvalue)({ - type: 'code', - lang: flag || null, - value: trim(exdentedContent) - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/code-indented.js b/node_modules/remark-parse/lib/tokenize/code-indented.js deleted file mode 100644 index c73849d9..00000000 --- a/node_modules/remark-parse/lib/tokenize/code-indented.js +++ /dev/null @@ -1,98 +0,0 @@ -'use strict'; - -var repeat = require('repeat-string'); -var trim = require('trim-trailing-lines'); - -module.exports = indentedCode; - -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; - -var CODE_INDENT_COUNT = 4; -var CODE_INDENT = repeat(C_SPACE, CODE_INDENT_COUNT); - -/* Tokenise indented code. */ -function indentedCode(eat, value, silent) { - var index = -1; - var length = value.length; - var subvalue = ''; - var content = ''; - var subvalueQueue = ''; - var contentQueue = ''; - var character; - var blankQueue; - var indent; - - while (++index < length) { - character = value.charAt(index); - - if (indent) { - indent = false; - - subvalue += subvalueQueue; - content += contentQueue; - subvalueQueue = ''; - contentQueue = ''; - - if (character === C_NEWLINE) { - subvalueQueue = character; - contentQueue = character; - } else { - subvalue += character; - content += character; - - while (++index < length) { - character = value.charAt(index); - - if (!character || character === C_NEWLINE) { - contentQueue = character; - subvalueQueue = character; - break; - } - - subvalue += character; - content += character; - } - } - } else if ( - character === C_SPACE && - value.charAt(index + 1) === character && - value.charAt(index + 2) === character && - value.charAt(index + 3) === character - ) { - subvalueQueue += CODE_INDENT; - index += 3; - indent = true; - } else if (character === C_TAB) { - subvalueQueue += character; - indent = true; - } else { - blankQueue = ''; - - while (character === C_TAB || character === C_SPACE) { - blankQueue += character; - character = value.charAt(++index); - } - - if (character !== C_NEWLINE) { - break; - } - - subvalueQueue += blankQueue + character; - contentQueue += character; - } - } - - if (content) { - if (silent) { - return true; - } - - return eat(subvalue)({ - type: 'code', - lang: null, - value: trim(content) - }); - } -} diff --git a/node_modules/remark-parse/lib/tokenize/code-inline.js b/node_modules/remark-parse/lib/tokenize/code-inline.js deleted file mode 100644 index c0a496b4..00000000 --- a/node_modules/remark-parse/lib/tokenize/code-inline.js +++ /dev/null @@ -1,112 +0,0 @@ -'use 
strict'; - -var whitespace = require('is-whitespace-character'); -var locate = require('../locate/code-inline'); - -module.exports = inlineCode; -inlineCode.locator = locate; - -var C_TICK = '`'; - -/* Tokenise inline code. */ -function inlineCode(eat, value, silent) { - var length = value.length; - var index = 0; - var queue = ''; - var tickQueue = ''; - var contentQueue; - var subqueue; - var count; - var openingCount; - var subvalue; - var character; - var found; - var next; - - while (index < length) { - if (value.charAt(index) !== C_TICK) { - break; - } - - queue += C_TICK; - index++; - } - - if (!queue) { - return; - } - - subvalue = queue; - openingCount = index; - queue = ''; - next = value.charAt(index); - count = 0; - - while (index < length) { - character = next; - next = value.charAt(index + 1); - - if (character === C_TICK) { - count++; - tickQueue += character; - } else { - count = 0; - queue += character; - } - - if (count && next !== C_TICK) { - if (count === openingCount) { - subvalue += queue + tickQueue; - found = true; - break; - } - - queue += tickQueue; - tickQueue = ''; - } - - index++; - } - - if (!found) { - if (openingCount % 2 !== 0) { - return; - } - - queue = ''; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - contentQueue = ''; - subqueue = ''; - length = queue.length; - index = -1; - - while (++index < length) { - character = queue.charAt(index); - - if (whitespace(character)) { - subqueue += character; - continue; - } - - if (subqueue) { - if (contentQueue) { - contentQueue += subqueue; - } - - subqueue = ''; - } - - contentQueue += character; - } - - return eat(subvalue)({ - type: 'inlineCode', - value: contentQueue - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/definition.js b/node_modules/remark-parse/lib/tokenize/definition.js deleted file mode 100644 index 1cce274c..00000000 --- a/node_modules/remark-parse/lib/tokenize/definition.js +++ /dev/null @@ -1,278 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); -var normalize = require('../util/normalize'); - -module.exports = definition; -definition.notInList = true; -definition.notInBlock = true; - -var C_DOUBLE_QUOTE = '"'; -var C_SINGLE_QUOTE = '\''; -var C_BACKSLASH = '\\'; -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_BRACKET_OPEN = '['; -var C_BRACKET_CLOSE = ']'; -var C_PAREN_OPEN = '('; -var C_PAREN_CLOSE = ')'; -var C_COLON = ':'; -var C_LT = '<'; -var C_GT = '>'; - -function definition(eat, value, silent) { - var self = this; - var commonmark = self.options.commonmark; - var index = 0; - var length = value.length; - var subvalue = ''; - var beforeURL; - var beforeTitle; - var queue; - var character; - var test; - var identifier; - var url; - var title; - - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - break; - } - - subvalue += character; - index++; - } - - character = value.charAt(index); - - if (character !== C_BRACKET_OPEN) { - return; - } - - index++; - subvalue += character; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character === C_BRACKET_CLOSE) { - break; - } else if (character === C_BACKSLASH) { - queue += character; - index++; - character = value.charAt(index); - } - - queue += character; - index++; - } - - if ( - !queue || - value.charAt(index) !== C_BRACKET_CLOSE || - value.charAt(index + 1) !== C_COLON - ) { - return; - } - - identifier = queue; - subvalue += queue 
+ C_BRACKET_CLOSE + C_COLON; - index = subvalue.length; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if ( - character !== C_TAB && - character !== C_SPACE && - character !== C_NEWLINE - ) { - break; - } - - subvalue += character; - index++; - } - - character = value.charAt(index); - queue = ''; - beforeURL = subvalue; - - if (character === C_LT) { - index++; - - while (index < length) { - character = value.charAt(index); - - if (!isEnclosedURLCharacter(character)) { - break; - } - - queue += character; - index++; - } - - character = value.charAt(index); - - if (character === isEnclosedURLCharacter.delimiter) { - subvalue += C_LT + queue + character; - index++; - } else { - if (commonmark) { - return; - } - - index -= queue.length + 1; - queue = ''; - } - } - - if (!queue) { - while (index < length) { - character = value.charAt(index); - - if (!isUnclosedURLCharacter(character)) { - break; - } - - queue += character; - index++; - } - - subvalue += queue; - } - - if (!queue) { - return; - } - - url = queue; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if ( - character !== C_TAB && - character !== C_SPACE && - character !== C_NEWLINE - ) { - break; - } - - queue += character; - index++; - } - - character = value.charAt(index); - test = null; - - if (character === C_DOUBLE_QUOTE) { - test = C_DOUBLE_QUOTE; - } else if (character === C_SINGLE_QUOTE) { - test = C_SINGLE_QUOTE; - } else if (character === C_PAREN_OPEN) { - test = C_PAREN_CLOSE; - } - - if (!test) { - queue = ''; - index = subvalue.length; - } else if (queue) { - subvalue += queue + character; - index = subvalue.length; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character === test) { - break; - } - - if (character === C_NEWLINE) { - index++; - character = value.charAt(index); - - if (character === C_NEWLINE || character === test) { - return; - } - - queue += C_NEWLINE; - } - - queue += character; - index++; - } - - character = value.charAt(index); - - if (character !== test) { - return; - } - - beforeTitle = subvalue; - subvalue += queue + character; - index++; - title = queue; - queue = ''; - } else { - return; - } - - while (index < length) { - character = value.charAt(index); - - if (character !== C_TAB && character !== C_SPACE) { - break; - } - - subvalue += character; - index++; - } - - character = value.charAt(index); - - if (!character || character === C_NEWLINE) { - if (silent) { - return true; - } - - beforeURL = eat(beforeURL).test().end; - url = self.decode.raw(self.unescape(url), beforeURL, {nonTerminated: false}); - - if (title) { - beforeTitle = eat(beforeTitle).test().end; - title = self.decode.raw(self.unescape(title), beforeTitle); - } - - return eat(subvalue)({ - type: 'definition', - identifier: normalize(identifier), - title: title || null, - url: url - }); - } -} - -/* Check if `character` can be inside an enclosed URI. */ -function isEnclosedURLCharacter(character) { - return character !== C_GT && - character !== C_BRACKET_OPEN && - character !== C_BRACKET_CLOSE; -} - -isEnclosedURLCharacter.delimiter = C_GT; - -/* Check if `character` can be inside an unclosed URI. 
*/ -function isUnclosedURLCharacter(character) { - return character !== C_BRACKET_OPEN && - character !== C_BRACKET_CLOSE && - !whitespace(character); -} diff --git a/node_modules/remark-parse/lib/tokenize/delete.js b/node_modules/remark-parse/lib/tokenize/delete.js deleted file mode 100644 index ca7c68a8..00000000 --- a/node_modules/remark-parse/lib/tokenize/delete.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); -var locate = require('../locate/delete'); - -module.exports = strikethrough; -strikethrough.locator = locate; - -var C_TILDE = '~'; -var DOUBLE = '~~'; - -function strikethrough(eat, value, silent) { - var self = this; - var character = ''; - var previous = ''; - var preceding = ''; - var subvalue = ''; - var index; - var length; - var now; - - if ( - !self.options.gfm || - value.charAt(0) !== C_TILDE || - value.charAt(1) !== C_TILDE || - whitespace(value.charAt(2)) - ) { - return; - } - - index = 1; - length = value.length; - now = eat.now(); - now.column += 2; - now.offset += 2; - - while (++index < length) { - character = value.charAt(index); - - if ( - character === C_TILDE && - previous === C_TILDE && - (!preceding || !whitespace(preceding)) - ) { - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - return eat(DOUBLE + subvalue + DOUBLE)({ - type: 'delete', - children: self.tokenizeInline(subvalue, now) - }); - } - - subvalue += previous; - preceding = previous; - previous = character; - } -} diff --git a/node_modules/remark-parse/lib/tokenize/emphasis.js b/node_modules/remark-parse/lib/tokenize/emphasis.js deleted file mode 100644 index b2c87b44..00000000 --- a/node_modules/remark-parse/lib/tokenize/emphasis.js +++ /dev/null @@ -1,85 +0,0 @@ -'use strict'; - -var trim = require('trim'); -var word = require('is-word-character'); -var whitespace = require('is-whitespace-character'); -var locate = require('../locate/emphasis'); - -module.exports = emphasis; -emphasis.locator = locate; - -var C_ASTERISK = '*'; -var C_UNDERSCORE = '_'; - -function emphasis(eat, value, silent) { - var self = this; - var index = 0; - var character = value.charAt(index); - var now; - var pedantic; - var marker; - var queue; - var subvalue; - var length; - var prev; - - if (character !== C_ASTERISK && character !== C_UNDERSCORE) { - return; - } - - pedantic = self.options.pedantic; - subvalue = character; - marker = character; - length = value.length; - index++; - queue = ''; - character = ''; - - if (pedantic && whitespace(value.charAt(index))) { - return; - } - - while (index < length) { - prev = character; - character = value.charAt(index); - - if (character === marker && (!pedantic || !whitespace(prev))) { - character = value.charAt(++index); - - if (character !== marker) { - if (!trim(queue) || prev === marker) { - return; - } - - if (!pedantic && marker === C_UNDERSCORE && word(character)) { - queue += marker; - continue; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - now = eat.now(); - now.column++; - now.offset++; - - return eat(subvalue + queue + marker)({ - type: 'emphasis', - children: self.tokenizeInline(queue, now) - }); - } - - queue += marker; - } - - if (!pedantic && character === '\\') { - queue += character; - character = value.charAt(++index); - } - - queue += character; - index++; - } -} diff --git a/node_modules/remark-parse/lib/tokenize/escape.js b/node_modules/remark-parse/lib/tokenize/escape.js deleted file mode 100644 index d6f99bcc..00000000 
--- a/node_modules/remark-parse/lib/tokenize/escape.js +++ /dev/null @@ -1,34 +0,0 @@ -'use strict'; - -var locate = require('../locate/escape'); - -module.exports = escape; -escape.locator = locate; - -function escape(eat, value, silent) { - var self = this; - var character; - var node; - - if (value.charAt(0) === '\\') { - character = value.charAt(1); - - if (self.escape.indexOf(character) !== -1) { - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - if (character === '\n') { - node = {type: 'break'}; - } else { - node = { - type: 'text', - value: character - }; - } - - return eat('\\' + character)(node); - } - } -} diff --git a/node_modules/remark-parse/lib/tokenize/footnote-definition.js b/node_modules/remark-parse/lib/tokenize/footnote-definition.js deleted file mode 100644 index f48ff9bb..00000000 --- a/node_modules/remark-parse/lib/tokenize/footnote-definition.js +++ /dev/null @@ -1,185 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); -var normalize = require('../util/normalize'); - -module.exports = footnoteDefinition; -footnoteDefinition.notInList = true; -footnoteDefinition.notInBlock = true; - -var C_BACKSLASH = '\\'; -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_BRACKET_OPEN = '['; -var C_BRACKET_CLOSE = ']'; -var C_CARET = '^'; -var C_COLON = ':'; - -var EXPRESSION_INITIAL_TAB = /^( {4}|\t)?/gm; - -function footnoteDefinition(eat, value, silent) { - var self = this; - var offsets = self.offset; - var index; - var length; - var subvalue; - var now; - var currentLine; - var content; - var queue; - var subqueue; - var character; - var identifier; - var add; - var exit; - - if (!self.options.footnotes) { - return; - } - - index = 0; - length = value.length; - subvalue = ''; - now = eat.now(); - currentLine = now.line; - - while (index < length) { - character = value.charAt(index); - - if (!whitespace(character)) { - break; - } - - subvalue += character; - index++; - } - - if ( - value.charAt(index) !== C_BRACKET_OPEN || - value.charAt(index + 1) !== C_CARET - ) { - return; - } - - subvalue += C_BRACKET_OPEN + C_CARET; - index = subvalue.length; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character === C_BRACKET_CLOSE) { - break; - } else if (character === C_BACKSLASH) { - queue += character; - index++; - character = value.charAt(index); - } - - queue += character; - index++; - } - - if ( - !queue || - value.charAt(index) !== C_BRACKET_CLOSE || - value.charAt(index + 1) !== C_COLON - ) { - return; - } - - if (silent) { - return true; - } - - identifier = normalize(queue); - subvalue += queue + C_BRACKET_CLOSE + C_COLON; - index = subvalue.length; - - while (index < length) { - character = value.charAt(index); - - if (character !== C_TAB && character !== C_SPACE) { - break; - } - - subvalue += character; - index++; - } - - now.column += subvalue.length; - now.offset += subvalue.length; - queue = ''; - content = ''; - subqueue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character === C_NEWLINE) { - subqueue = character; - index++; - - while (index < length) { - character = value.charAt(index); - - if (character !== C_NEWLINE) { - break; - } - - subqueue += character; - index++; - } - - queue += subqueue; - subqueue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character !== C_SPACE) { - break; - } - - subqueue += character; - index++; - } - - if (subqueue.length === 0) { - break; - } - - queue 
+= subqueue; - } - - if (queue) { - content += queue; - queue = ''; - } - - content += character; - index++; - } - - subvalue += content; - - content = content.replace(EXPRESSION_INITIAL_TAB, function (line) { - offsets[currentLine] = (offsets[currentLine] || 0) + line.length; - currentLine++; - - return ''; - }); - - add = eat(subvalue); - - exit = self.enterBlock(); - content = self.tokenizeBlock(content, now); - exit(); - - return add({ - type: 'footnoteDefinition', - identifier: identifier, - children: content - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/heading-atx.js b/node_modules/remark-parse/lib/tokenize/heading-atx.js deleted file mode 100644 index aafeabb5..00000000 --- a/node_modules/remark-parse/lib/tokenize/heading-atx.js +++ /dev/null @@ -1,141 +0,0 @@ -'use strict'; - -module.exports = atxHeading; - -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_HASH = '#'; - -var MAX_ATX_COUNT = 6; - -function atxHeading(eat, value, silent) { - var self = this; - var settings = self.options; - var length = value.length + 1; - var index = -1; - var now = eat.now(); - var subvalue = ''; - var content = ''; - var character; - var queue; - var depth; - - /* Eat initial spacing. */ - while (++index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - index--; - break; - } - - subvalue += character; - } - - /* Eat hashes. */ - depth = 0; - - while (++index <= length) { - character = value.charAt(index); - - if (character !== C_HASH) { - index--; - break; - } - - subvalue += character; - depth++; - } - - if (depth > MAX_ATX_COUNT) { - return; - } - - if ( - !depth || - (!settings.pedantic && value.charAt(index + 1) === C_HASH) - ) { - return; - } - - length = value.length + 1; - - /* Eat intermediate white-space. */ - queue = ''; - - while (++index < length) { - character = value.charAt(index); - - if (character !== C_SPACE && character !== C_TAB) { - index--; - break; - } - - queue += character; - } - - /* Exit when not in pedantic mode without spacing. */ - if ( - !settings.pedantic && - queue.length === 0 && - character && - character !== C_NEWLINE - ) { - return; - } - - if (silent) { - return true; - } - - /* Eat content. 
*/ - subvalue += queue; - queue = ''; - content = ''; - - while (++index < length) { - character = value.charAt(index); - - if (!character || character === C_NEWLINE) { - break; - } - - if ( - character !== C_SPACE && - character !== C_TAB && - character !== C_HASH - ) { - content += queue + character; - queue = ''; - continue; - } - - while (character === C_SPACE || character === C_TAB) { - queue += character; - character = value.charAt(++index); - } - - while (character === C_HASH) { - queue += character; - character = value.charAt(++index); - } - - while (character === C_SPACE || character === C_TAB) { - queue += character; - character = value.charAt(++index); - } - - index--; - } - - now.column += subvalue.length; - now.offset += subvalue.length; - subvalue += content + queue; - - return eat(subvalue)({ - type: 'heading', - depth: depth, - children: self.tokenizeInline(content, now) - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/heading-setext.js b/node_modules/remark-parse/lib/tokenize/heading-setext.js deleted file mode 100644 index 96c6130d..00000000 --- a/node_modules/remark-parse/lib/tokenize/heading-setext.js +++ /dev/null @@ -1,107 +0,0 @@ -'use strict'; - -module.exports = setextHeading; - -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_EQUALS = '='; -var C_DASH = '-'; - -var MAX_HEADING_INDENT = 3; - -/* Map of characters which can be used to mark setext - * headers, mapping to their corresponding depth. */ -var SETEXT_MARKERS = {}; - -SETEXT_MARKERS[C_EQUALS] = 1; -SETEXT_MARKERS[C_DASH] = 2; - -function setextHeading(eat, value, silent) { - var self = this; - var now = eat.now(); - var length = value.length; - var index = -1; - var subvalue = ''; - var content; - var queue; - var character; - var marker; - var depth; - - /* Eat initial indentation. */ - while (++index < length) { - character = value.charAt(index); - - if (character !== C_SPACE || index >= MAX_HEADING_INDENT) { - index--; - break; - } - - subvalue += character; - } - - /* Eat content. */ - content = ''; - queue = ''; - - while (++index < length) { - character = value.charAt(index); - - if (character === C_NEWLINE) { - index--; - break; - } - - if (character === C_SPACE || character === C_TAB) { - queue += character; - } else { - content += queue + character; - queue = ''; - } - } - - now.column += subvalue.length; - now.offset += subvalue.length; - subvalue += content + queue; - - /* Ensure the content is followed by a newline and a - * valid marker. */ - character = value.charAt(++index); - marker = value.charAt(++index); - - if (character !== C_NEWLINE || !SETEXT_MARKERS[marker]) { - return; - } - - subvalue += character; - - /* Eat Setext-line. 
*/ - queue = marker; - depth = SETEXT_MARKERS[marker]; - - while (++index < length) { - character = value.charAt(index); - - if (character !== marker) { - if (character !== C_NEWLINE) { - return; - } - - index--; - break; - } - - queue += character; - } - - if (silent) { - return true; - } - - return eat(subvalue + queue)({ - type: 'heading', - depth: depth, - children: self.tokenizeInline(content, now) - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/html-block.js b/node_modules/remark-parse/lib/tokenize/html-block.js deleted file mode 100644 index 6e81eb29..00000000 --- a/node_modules/remark-parse/lib/tokenize/html-block.js +++ /dev/null @@ -1,94 +0,0 @@ -'use strict'; - -var openCloseTag = require('../util/html').openCloseTag; - -module.exports = blockHTML; - -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_NEWLINE = '\n'; -var C_LT = '<'; - -function blockHTML(eat, value, silent) { - var self = this; - var blocks = self.options.blocks; - var length = value.length; - var index = 0; - var next; - var line; - var offset; - var character; - var count; - var sequence; - var subvalue; - - var sequences = [ - [/^<(script|pre|style)(?=(\s|>|$))/i, /<\/(script|pre|style)>/i, true], - [/^<!--/, /-->/, true], - [/^<\?/, /\?>/, true], - [/^<![A-Za-z]/, />/, true], - [/^<!\[CDATA\[/, /\]\]>/, true], - [new RegExp('^</?(' + blocks.join('|') + ')(?=(\\s|/?>|$))', 'i'), /^$/, true], - [new RegExp(openCloseTag.source + '\\s*$'), /^$/, false] - ]; - - /* Eat initial spacing. */ - while (index < length) { - character = value.charAt(index); - - if (character !== C_TAB && character !== C_SPACE) { - break; - } - - index++; - } - - if (value.charAt(index) !== C_LT) { - return; - } - - next = value.indexOf(C_NEWLINE, index + 1); - next = next === -1 ? length : next; - line = value.slice(index, next); - offset = -1; - count = sequences.length; - - while (++offset < count) { - if (sequences[offset][0].test(line)) { - sequence = sequences[offset]; - break; - } - } - - if (!sequence) { - return; - } - - if (silent) { - return sequence[2]; - } - - index = next; - - if (!sequence[1].test(line)) { - while (index < length) { - next = value.indexOf(C_NEWLINE, index + 1); - next = next === -1 ? length : next; - line = value.slice(index + 1, next); - - if (sequence[1].test(line)) { - if (line) { - index = next; - } - - break; - } - - index = next; - } - } - - subvalue = value.slice(0, index); - - return eat(subvalue)({type: 'html', value: subvalue}); -} diff --git a/node_modules/remark-parse/lib/tokenize/html-inline.js b/node_modules/remark-parse/lib/tokenize/html-inline.js deleted file mode 100644 index c204e962..00000000 --- a/node_modules/remark-parse/lib/tokenize/html-inline.js +++ /dev/null @@ -1,54 +0,0 @@ -'use strict'; - -var alphabetical = require('is-alphabetical'); -var locate = require('../locate/tag'); -var tag = require('../util/html').tag; - -module.exports = inlineHTML; -inlineHTML.locator = locate; - -var EXPRESSION_HTML_LINK_OPEN = /^<a /i; -var EXPRESSION_HTML_LINK_CLOSE = /^<\/a>/i; - -function inlineHTML(eat, value, silent) { - var self = this; - var length = value.length; - var character; - var subvalue; - - if (value.charAt(0) !== '<' || length < 3) { - return; - } - - character = value.charAt(1); - - if ( - !alphabetical(character) && - character !== '?' && - character !== '!' && - character !== '/' - ) { - return; - } - - subvalue = value.match(tag); - - if (!subvalue) { - return; - } - - /* istanbul ignore if - not used yet. 
*/ - if (silent) { - return true; - } - - subvalue = subvalue[0]; - - if (!self.inLink && EXPRESSION_HTML_LINK_OPEN.test(subvalue)) { - self.inLink = true; - } else if (self.inLink && EXPRESSION_HTML_LINK_CLOSE.test(subvalue)) { - self.inLink = false; - } - - return eat(subvalue)({type: 'html', value: subvalue}); -} diff --git a/node_modules/remark-parse/lib/tokenize/link.js b/node_modules/remark-parse/lib/tokenize/link.js deleted file mode 100644 index 3ef5e1ba..00000000 --- a/node_modules/remark-parse/lib/tokenize/link.js +++ /dev/null @@ -1,392 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); -var locate = require('../locate/link'); - -module.exports = link; -link.locator = locate; - -var own = {}.hasOwnProperty; - -var C_BACKSLASH = '\\'; -var C_BRACKET_OPEN = '['; -var C_BRACKET_CLOSE = ']'; -var C_PAREN_OPEN = '('; -var C_PAREN_CLOSE = ')'; -var C_LT = '<'; -var C_GT = '>'; -var C_TICK = '`'; -var C_DOUBLE_QUOTE = '"'; -var C_SINGLE_QUOTE = '\''; - -/* Map of characters, which can be used to mark link - * and image titles. */ -var LINK_MARKERS = {}; - -LINK_MARKERS[C_DOUBLE_QUOTE] = C_DOUBLE_QUOTE; -LINK_MARKERS[C_SINGLE_QUOTE] = C_SINGLE_QUOTE; - -/* Map of characters, which can be used to mark link - * and image titles in commonmark-mode. */ -var COMMONMARK_LINK_MARKERS = {}; - -COMMONMARK_LINK_MARKERS[C_DOUBLE_QUOTE] = C_DOUBLE_QUOTE; -COMMONMARK_LINK_MARKERS[C_SINGLE_QUOTE] = C_SINGLE_QUOTE; -COMMONMARK_LINK_MARKERS[C_PAREN_OPEN] = C_PAREN_CLOSE; - -function link(eat, value, silent) { - var self = this; - var subvalue = ''; - var index = 0; - var character = value.charAt(0); - var pedantic = self.options.pedantic; - var commonmark = self.options.commonmark; - var gfm = self.options.gfm; - var closed; - var count; - var opening; - var beforeURL; - var beforeTitle; - var subqueue; - var hasMarker; - var markers; - var isImage; - var content; - var marker; - var length; - var title; - var depth; - var queue; - var url; - var now; - var exit; - var node; - - /* Detect whether this is an image. */ - if (character === '!') { - isImage = true; - subvalue = character; - character = value.charAt(++index); - } - - /* Eat the opening. */ - if (character !== C_BRACKET_OPEN) { - return; - } - - /* Exit when this is a link and we’re already inside - * a link. */ - if (!isImage && self.inLink) { - return; - } - - subvalue += character; - queue = ''; - index++; - - /* Eat the content. */ - length = value.length; - now = eat.now(); - depth = 0; - - now.column += index; - now.offset += index; - - while (index < length) { - character = value.charAt(index); - subqueue = character; - - if (character === C_TICK) { - /* Inline-code in link content. */ - count = 1; - - while (value.charAt(index + 1) === C_TICK) { - subqueue += character; - index++; - count++; - } - - if (!opening) { - opening = count; - } else if (count >= opening) { - opening = 0; - } - } else if (character === C_BACKSLASH) { - /* Allow brackets to be escaped. */ - index++; - subqueue += value.charAt(index); - /* In GFM mode, brackets in code still count. - * In all other modes, they don’t. This empty - * block prevents the next statements are - * entered. */ - } else if ((!opening || gfm) && character === C_BRACKET_OPEN) { - depth++; - } else if ((!opening || gfm) && character === C_BRACKET_CLOSE) { - if (depth) { - depth--; - } else { - /* Allow white-space between content and - * url in GFM mode. 
*/ - if (!pedantic) { - while (index < length) { - character = value.charAt(index + 1); - - if (!whitespace(character)) { - break; - } - - subqueue += character; - index++; - } - } - - if (value.charAt(index + 1) !== C_PAREN_OPEN) { - return; - } - - subqueue += C_PAREN_OPEN; - closed = true; - index++; - - break; - } - } - - queue += subqueue; - subqueue = ''; - index++; - } - - /* Eat the content closing. */ - if (!closed) { - return; - } - - content = queue; - subvalue += queue + subqueue; - index++; - - /* Eat white-space. */ - while (index < length) { - character = value.charAt(index); - - if (!whitespace(character)) { - break; - } - - subvalue += character; - index++; - } - - /* Eat the URL. */ - character = value.charAt(index); - markers = commonmark ? COMMONMARK_LINK_MARKERS : LINK_MARKERS; - queue = ''; - beforeURL = subvalue; - - if (character === C_LT) { - index++; - beforeURL += C_LT; - - while (index < length) { - character = value.charAt(index); - - if (character === C_GT) { - break; - } - - if (commonmark && character === '\n') { - return; - } - - queue += character; - index++; - } - - if (value.charAt(index) !== C_GT) { - return; - } - - subvalue += C_LT + queue + C_GT; - url = queue; - index++; - } else { - character = null; - subqueue = ''; - - while (index < length) { - character = value.charAt(index); - - if (subqueue && own.call(markers, character)) { - break; - } - - if (whitespace(character)) { - if (!pedantic) { - break; - } - - subqueue += character; - } else { - if (character === C_PAREN_OPEN) { - depth++; - } else if (character === C_PAREN_CLOSE) { - if (depth === 0) { - break; - } - - depth--; - } - - queue += subqueue; - subqueue = ''; - - if (character === C_BACKSLASH) { - queue += C_BACKSLASH; - character = value.charAt(++index); - } - - queue += character; - } - - index++; - } - - subvalue += queue; - url = queue; - index = subvalue.length; - } - - /* Eat white-space. */ - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (!whitespace(character)) { - break; - } - - queue += character; - index++; - } - - character = value.charAt(index); - subvalue += queue; - - /* Eat the title. */ - if (queue && own.call(markers, character)) { - index++; - subvalue += character; - queue = ''; - marker = markers[character]; - beforeTitle = subvalue; - - /* In commonmark-mode, things are pretty easy: the - * marker cannot occur inside the title. - * - * Non-commonmark does, however, support nested - * delimiters. 
*/ - if (commonmark) { - while (index < length) { - character = value.charAt(index); - - if (character === marker) { - break; - } - - if (character === C_BACKSLASH) { - queue += C_BACKSLASH; - character = value.charAt(++index); - } - - index++; - queue += character; - } - - character = value.charAt(index); - - if (character !== marker) { - return; - } - - title = queue; - subvalue += queue + character; - index++; - - while (index < length) { - character = value.charAt(index); - - if (!whitespace(character)) { - break; - } - - subvalue += character; - index++; - } - } else { - subqueue = ''; - - while (index < length) { - character = value.charAt(index); - - if (character === marker) { - if (hasMarker) { - queue += marker + subqueue; - subqueue = ''; - } - - hasMarker = true; - } else if (!hasMarker) { - queue += character; - } else if (character === C_PAREN_CLOSE) { - subvalue += queue + marker + subqueue; - title = queue; - break; - } else if (whitespace(character)) { - subqueue += character; - } else { - queue += marker + subqueue + character; - subqueue = ''; - hasMarker = false; - } - - index++; - } - } - } - - if (value.charAt(index) !== C_PAREN_CLOSE) { - return; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - subvalue += C_PAREN_CLOSE; - - url = self.decode.raw(self.unescape(url), eat(beforeURL).test().end, {nonTerminated: false}); - - if (title) { - beforeTitle = eat(beforeTitle).test().end; - title = self.decode.raw(self.unescape(title), beforeTitle); - } - - node = { - type: isImage ? 'image' : 'link', - title: title || null, - url: url - }; - - if (isImage) { - node.alt = self.decode.raw(self.unescape(content), now) || null; - } else { - exit = self.enterLink(); - node.children = self.tokenizeInline(content, now); - exit(); - } - - return eat(subvalue)(node); -} diff --git a/node_modules/remark-parse/lib/tokenize/list.js b/node_modules/remark-parse/lib/tokenize/list.js deleted file mode 100644 index 9164c816..00000000 --- a/node_modules/remark-parse/lib/tokenize/list.js +++ /dev/null @@ -1,474 +0,0 @@ -'use strict'; - -/* eslint-disable max-params */ - -var trim = require('trim'); -var repeat = require('repeat-string'); -var decimal = require('is-decimal'); -var getIndent = require('../util/get-indentation'); -var removeIndent = require('../util/remove-indentation'); -var interrupt = require('../util/interrupt'); - -module.exports = list; - -var C_ASTERISK = '*'; -var C_UNDERSCORE = '_'; -var C_PLUS = '+'; -var C_DASH = '-'; -var C_DOT = '.'; -var C_SPACE = ' '; -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_PAREN_CLOSE = ')'; -var C_X_LOWER = 'x'; - -var TAB_SIZE = 4; -var EXPRESSION_LOOSE_LIST_ITEM = /\n\n(?!\s*$)/; -var EXPRESSION_TASK_ITEM = /^\[([ \t]|x|X)][ \t]/; -var EXPRESSION_BULLET = /^([ \t]*)([*+-]|\d+[.)])( {1,4}(?! )| |\t|$|(?=\n))([^\n]*)/; -var EXPRESSION_PEDANTIC_BULLET = /^([ \t]*)([*+-]|\d+[.)])([ \t]+)/; -var EXPRESSION_INITIAL_INDENT = /^( {1,4}|\t)?/gm; - -/* Map of characters which can be used to mark - * list-items. */ -var LIST_UNORDERED_MARKERS = {}; - -LIST_UNORDERED_MARKERS[C_ASTERISK] = true; -LIST_UNORDERED_MARKERS[C_PLUS] = true; -LIST_UNORDERED_MARKERS[C_DASH] = true; - -/* Map of characters which can be used to mark - * list-items after a digit. */ -var LIST_ORDERED_MARKERS = {}; - -LIST_ORDERED_MARKERS[C_DOT] = true; - -/* Map of characters which can be used to mark - * list-items after a digit. 
*/ -var LIST_ORDERED_COMMONMARK_MARKERS = {}; - -LIST_ORDERED_COMMONMARK_MARKERS[C_DOT] = true; -LIST_ORDERED_COMMONMARK_MARKERS[C_PAREN_CLOSE] = true; - -function list(eat, value, silent) { - var self = this; - var commonmark = self.options.commonmark; - var pedantic = self.options.pedantic; - var tokenizers = self.blockTokenizers; - var interuptors = self.interruptList; - var markers; - var index = 0; - var length = value.length; - var start = null; - var size = 0; - var queue; - var ordered; - var character; - var marker; - var nextIndex; - var startIndex; - var prefixed; - var currentMarker; - var content; - var line; - var prevEmpty; - var empty; - var items; - var allLines; - var emptyLines; - var item; - var enterTop; - var exitBlockquote; - var isLoose; - var node; - var now; - var end; - var indented; - - while (index < length) { - character = value.charAt(index); - - if (character === C_TAB) { - size += TAB_SIZE - (size % TAB_SIZE); - } else if (character === C_SPACE) { - size++; - } else { - break; - } - - index++; - } - - if (size >= TAB_SIZE) { - return; - } - - character = value.charAt(index); - - markers = commonmark ? - LIST_ORDERED_COMMONMARK_MARKERS : - LIST_ORDERED_MARKERS; - - if (LIST_UNORDERED_MARKERS[character] === true) { - marker = character; - ordered = false; - } else { - ordered = true; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (!decimal(character)) { - break; - } - - queue += character; - index++; - } - - character = value.charAt(index); - - if (!queue || markers[character] !== true) { - return; - } - - start = parseInt(queue, 10); - marker = character; - } - - character = value.charAt(++index); - - if (character !== C_SPACE && character !== C_TAB) { - return; - } - - if (silent) { - return true; - } - - index = 0; - items = []; - allLines = []; - emptyLines = []; - - while (index < length) { - nextIndex = value.indexOf(C_NEWLINE, index); - startIndex = index; - prefixed = false; - indented = false; - - if (nextIndex === -1) { - nextIndex = length; - } - - end = index + TAB_SIZE; - size = 0; - - while (index < length) { - character = value.charAt(index); - - if (character === C_TAB) { - size += TAB_SIZE - (size % TAB_SIZE); - } else if (character === C_SPACE) { - size++; - } else { - break; - } - - index++; - } - - if (size >= TAB_SIZE) { - indented = true; - } - - if (item && size >= item.indent) { - indented = true; - } - - character = value.charAt(index); - currentMarker = null; - - if (!indented) { - if (LIST_UNORDERED_MARKERS[character] === true) { - currentMarker = character; - index++; - size++; - } else { - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (!decimal(character)) { - break; - } - - queue += character; - index++; - } - - character = value.charAt(index); - index++; - - if (queue && markers[character] === true) { - currentMarker = character; - size += queue.length + 1; - } - } - - if (currentMarker) { - character = value.charAt(index); - - if (character === C_TAB) { - size += TAB_SIZE - (size % TAB_SIZE); - index++; - } else if (character === C_SPACE) { - end = index + TAB_SIZE; - - while (index < end) { - if (value.charAt(index) !== C_SPACE) { - break; - } - - index++; - size++; - } - - if (index === end && value.charAt(index) === C_SPACE) { - index -= TAB_SIZE - 1; - size -= TAB_SIZE - 1; - } - } else if (character !== C_NEWLINE && character !== '') { - currentMarker = null; - } - } - } - - if (currentMarker) { - if (!pedantic && marker !== currentMarker) { - 
break; - } - - prefixed = true; - } else { - if (!commonmark && !indented && value.charAt(startIndex) === C_SPACE) { - indented = true; - } else if (commonmark && item) { - indented = size >= item.indent || size > TAB_SIZE; - } - - prefixed = false; - index = startIndex; - } - - line = value.slice(startIndex, nextIndex); - content = startIndex === index ? line : value.slice(index, nextIndex); - - if ( - currentMarker === C_ASTERISK || - currentMarker === C_UNDERSCORE || - currentMarker === C_DASH - ) { - if (tokenizers.thematicBreak.call(self, eat, line, true)) { - break; - } - } - - prevEmpty = empty; - empty = !trim(content).length; - - if (indented && item) { - item.value = item.value.concat(emptyLines, line); - allLines = allLines.concat(emptyLines, line); - emptyLines = []; - } else if (prefixed) { - if (emptyLines.length !== 0) { - item.value.push(''); - item.trail = emptyLines.concat(); - } - - item = { - value: [line], - indent: size, - trail: [] - }; - - items.push(item); - allLines = allLines.concat(emptyLines, line); - emptyLines = []; - } else if (empty) { - if (prevEmpty) { - break; - } - - emptyLines.push(line); - } else { - if (prevEmpty) { - break; - } - - if (interrupt(interuptors, tokenizers, self, [eat, line, true])) { - break; - } - - item.value = item.value.concat(emptyLines, line); - allLines = allLines.concat(emptyLines, line); - emptyLines = []; - } - - index = nextIndex + 1; - } - - node = eat(allLines.join(C_NEWLINE)).reset({ - type: 'list', - ordered: ordered, - start: start, - loose: null, - children: [] - }); - - enterTop = self.enterList(); - exitBlockquote = self.enterBlock(); - isLoose = false; - index = -1; - length = items.length; - - while (++index < length) { - item = items[index].value.join(C_NEWLINE); - now = eat.now(); - - item = eat(item)(listItem(self, item, now), node); - - if (item.loose) { - isLoose = true; - } - - item = items[index].trail.join(C_NEWLINE); - - if (index !== length - 1) { - item += C_NEWLINE; - } - - eat(item); - } - - enterTop(); - exitBlockquote(); - - node.loose = isLoose; - - return node; -} - -function listItem(ctx, value, position) { - var offsets = ctx.offset; - var fn = ctx.options.pedantic ? pedanticListItem : normalListItem; - var checked = null; - var task; - var indent; - - value = fn.apply(null, arguments); - - if (ctx.options.gfm) { - task = value.match(EXPRESSION_TASK_ITEM); - - if (task) { - indent = task[0].length; - checked = task[1].toLowerCase() === C_X_LOWER; - offsets[position.line] += indent; - value = value.slice(indent); - } - } - - return { - type: 'listItem', - loose: EXPRESSION_LOOSE_LIST_ITEM.test(value) || - value.charAt(value.length - 1) === C_NEWLINE, - checked: checked, - children: ctx.tokenizeBlock(value, position) - }; -} - -/* Create a list-item using overly simple mechanics. */ -function pedanticListItem(ctx, value, position) { - var offsets = ctx.offset; - var line = position.line; - - /* Remove the list-item’s bullet. */ - value = value.replace(EXPRESSION_PEDANTIC_BULLET, replacer); - - /* The initial line was also matched by the below, so - * we reset the `line`. */ - line = position.line; - - return value.replace(EXPRESSION_INITIAL_INDENT, replacer); - - /* A simple replacer which removed all matches, - * and adds their length to `offset`. */ - function replacer($0) { - offsets[line] = (offsets[line] || 0) + $0.length; - line++; - - return ''; - } -} - -/* Create a list-item using sane mechanics. 
*/ -function normalListItem(ctx, value, position) { - var offsets = ctx.offset; - var line = position.line; - var max; - var bullet; - var rest; - var lines; - var trimmedLines; - var index; - var length; - - /* Remove the list-item’s bullet. */ - value = value.replace(EXPRESSION_BULLET, replacer); - - lines = value.split(C_NEWLINE); - - trimmedLines = removeIndent(value, getIndent(max).indent).split(C_NEWLINE); - - /* We replaced the initial bullet with something - * else above, which was used to trick - * `removeIndentation` into removing some more - * characters when possible. However, that could - * result in the initial line to be stripped more - * than it should be. */ - trimmedLines[0] = rest; - - offsets[line] = (offsets[line] || 0) + bullet.length; - line++; - - index = 0; - length = lines.length; - - while (++index < length) { - offsets[line] = (offsets[line] || 0) + - lines[index].length - trimmedLines[index].length; - line++; - } - - return trimmedLines.join(C_NEWLINE); - - function replacer($0, $1, $2, $3, $4) { - bullet = $1 + $2 + $3; - rest = $4; - - /* Make sure that the first nine numbered list items - * can indent with an extra space. That is, when - * the bullet did not receive an extra final space. */ - if (Number($2) < 10 && bullet.length % 2 === 1) { - $2 = C_SPACE + $2; - } - - max = $1 + repeat(C_SPACE, $2.length) + $3; - - return max + rest; - } -} diff --git a/node_modules/remark-parse/lib/tokenize/newline.js b/node_modules/remark-parse/lib/tokenize/newline.js deleted file mode 100644 index 6008670c..00000000 --- a/node_modules/remark-parse/lib/tokenize/newline.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); - -module.exports = newline; - -/* Tokenise newline. */ -function newline(eat, value, silent) { - var character = value.charAt(0); - var length; - var subvalue; - var queue; - var index; - - if (character !== '\n') { - return; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - index = 1; - length = value.length; - subvalue = character; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (!whitespace(character)) { - break; - } - - queue += character; - - if (character === '\n') { - subvalue += queue; - queue = ''; - } - - index++; - } - - eat(subvalue); -} diff --git a/node_modules/remark-parse/lib/tokenize/paragraph.js b/node_modules/remark-parse/lib/tokenize/paragraph.js deleted file mode 100644 index 1492a027..00000000 --- a/node_modules/remark-parse/lib/tokenize/paragraph.js +++ /dev/null @@ -1,122 +0,0 @@ -'use strict'; - -var trim = require('trim'); -var decimal = require('is-decimal'); -var trimTrailingLines = require('trim-trailing-lines'); -var interrupt = require('../util/interrupt'); - -module.exports = paragraph; - -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; - -var TAB_SIZE = 4; - -/* Tokenise paragraph. */ -function paragraph(eat, value, silent) { - var self = this; - var settings = self.options; - var commonmark = settings.commonmark; - var gfm = settings.gfm; - var tokenizers = self.blockTokenizers; - var interruptors = self.interruptParagraph; - var index = value.indexOf(C_NEWLINE); - var length = value.length; - var position; - var subvalue; - var character; - var size; - var now; - - while (index < length) { - /* Eat everything if there’s no following newline. */ - if (index === -1) { - index = length; - break; - } - - /* Stop if the next character is NEWLINE. 
*/ - if (value.charAt(index + 1) === C_NEWLINE) { - break; - } - - /* In commonmark-mode, following indented lines - * are part of the paragraph. */ - if (commonmark) { - size = 0; - position = index + 1; - - while (position < length) { - character = value.charAt(position); - - if (character === C_TAB) { - size = TAB_SIZE; - break; - } else if (character === C_SPACE) { - size++; - } else { - break; - } - - position++; - } - - if (size >= TAB_SIZE) { - index = value.indexOf(C_NEWLINE, index + 1); - continue; - } - } - - subvalue = value.slice(index + 1); - - /* Check if the following code contains a possible - * block. */ - if (interrupt(interruptors, tokenizers, self, [eat, subvalue, true])) { - break; - } - - /* Break if the following line starts a list, when - * already in a list, or when in commonmark, or when - * in gfm mode and the bullet is *not* numeric. */ - if ( - tokenizers.list.call(self, eat, subvalue, true) && - ( - self.inList || - commonmark || - (gfm && !decimal(trim.left(subvalue).charAt(0))) - ) - ) { - break; - } - - position = index; - index = value.indexOf(C_NEWLINE, index + 1); - - if (index !== -1 && trim(value.slice(position, index)) === '') { - index = position; - break; - } - } - - subvalue = value.slice(0, index); - - if (trim(subvalue) === '') { - eat(subvalue); - - return null; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - now = eat.now(); - subvalue = trimTrailingLines(subvalue); - - return eat(subvalue)({ - type: 'paragraph', - children: self.tokenizeInline(subvalue, now) - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/reference.js b/node_modules/remark-parse/lib/tokenize/reference.js deleted file mode 100644 index 50713f1c..00000000 --- a/node_modules/remark-parse/lib/tokenize/reference.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); -var locate = require('../locate/link'); -var normalize = require('../util/normalize'); - -module.exports = reference; -reference.locator = locate; - -var T_LINK = 'link'; -var T_IMAGE = 'image'; -var T_FOOTNOTE = 'footnote'; -var REFERENCE_TYPE_SHORTCUT = 'shortcut'; -var REFERENCE_TYPE_COLLAPSED = 'collapsed'; -var REFERENCE_TYPE_FULL = 'full'; -var C_CARET = '^'; -var C_BACKSLASH = '\\'; -var C_BRACKET_OPEN = '['; -var C_BRACKET_CLOSE = ']'; - -function reference(eat, value, silent) { - var self = this; - var character = value.charAt(0); - var index = 0; - var length = value.length; - var subvalue = ''; - var intro = ''; - var type = T_LINK; - var referenceType = REFERENCE_TYPE_SHORTCUT; - var content; - var identifier; - var now; - var node; - var exit; - var queue; - var bracketed; - var depth; - - /* Check whether we’re eating an image. */ - if (character === '!') { - type = T_IMAGE; - intro = character; - character = value.charAt(++index); - } - - if (character !== C_BRACKET_OPEN) { - return; - } - - index++; - intro += character; - queue = ''; - - /* Check whether we’re eating a footnote. */ - if (self.options.footnotes && value.charAt(index) === C_CARET) { - /* Exit if `![^` is found, so the `!` will be seen as text after this, - * and we’ll enter this function again when `[^` is found. */ - if (type === T_IMAGE) { - return; - } - - intro += C_CARET; - index++; - type = T_FOOTNOTE; - } - - /* Eat the text. 
*/ - depth = 0; - - while (index < length) { - character = value.charAt(index); - - if (character === C_BRACKET_OPEN) { - bracketed = true; - depth++; - } else if (character === C_BRACKET_CLOSE) { - if (!depth) { - break; - } - - depth--; - } - - if (character === C_BACKSLASH) { - queue += C_BACKSLASH; - character = value.charAt(++index); - } - - queue += character; - index++; - } - - subvalue = queue; - content = queue; - character = value.charAt(index); - - if (character !== C_BRACKET_CLOSE) { - return; - } - - index++; - subvalue += character; - queue = ''; - - while (index < length) { - character = value.charAt(index); - - if (!whitespace(character)) { - break; - } - - queue += character; - index++; - } - - character = value.charAt(index); - - /* Inline footnotes cannot have an identifier. */ - if (type !== T_FOOTNOTE && character === C_BRACKET_OPEN) { - identifier = ''; - queue += character; - index++; - - while (index < length) { - character = value.charAt(index); - - if (character === C_BRACKET_OPEN || character === C_BRACKET_CLOSE) { - break; - } - - if (character === C_BACKSLASH) { - identifier += C_BACKSLASH; - character = value.charAt(++index); - } - - identifier += character; - index++; - } - - character = value.charAt(index); - - if (character === C_BRACKET_CLOSE) { - referenceType = identifier ? REFERENCE_TYPE_FULL : REFERENCE_TYPE_COLLAPSED; - queue += identifier + character; - index++; - } else { - identifier = ''; - } - - subvalue += queue; - queue = ''; - } else { - if (!content) { - return; - } - - identifier = content; - } - - /* Brackets cannot be inside the identifier. */ - if (referenceType !== REFERENCE_TYPE_FULL && bracketed) { - return; - } - - subvalue = intro + subvalue; - - if (type === T_LINK && self.inLink) { - return null; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - if (type === T_FOOTNOTE && content.indexOf(' ') !== -1) { - return eat(subvalue)({ - type: 'footnote', - children: this.tokenizeInline(content, eat.now()) - }); - } - - now = eat.now(); - now.column += intro.length; - now.offset += intro.length; - identifier = referenceType === REFERENCE_TYPE_FULL ? 
identifier : content; - - node = { - type: type + 'Reference', - identifier: normalize(identifier) - }; - - if (type === T_LINK || type === T_IMAGE) { - node.referenceType = referenceType; - } - - if (type === T_LINK) { - exit = self.enterLink(); - node.children = self.tokenizeInline(content, now); - exit(); - } else if (type === T_IMAGE) { - node.alt = self.decode.raw(self.unescape(content), now) || null; - } - - return eat(subvalue)(node); -} diff --git a/node_modules/remark-parse/lib/tokenize/strong.js b/node_modules/remark-parse/lib/tokenize/strong.js deleted file mode 100644 index 12d5785b..00000000 --- a/node_modules/remark-parse/lib/tokenize/strong.js +++ /dev/null @@ -1,84 +0,0 @@ -'use strict'; - -var trim = require('trim'); -var whitespace = require('is-whitespace-character'); -var locate = require('../locate/strong'); - -module.exports = strong; -strong.locator = locate; - -var C_ASTERISK = '*'; -var C_UNDERSCORE = '_'; - -function strong(eat, value, silent) { - var self = this; - var index = 0; - var character = value.charAt(index); - var now; - var pedantic; - var marker; - var queue; - var subvalue; - var length; - var prev; - - if ( - (character !== C_ASTERISK && character !== C_UNDERSCORE) || - value.charAt(++index) !== character - ) { - return; - } - - pedantic = self.options.pedantic; - marker = character; - subvalue = marker + marker; - length = value.length; - index++; - queue = ''; - character = ''; - - if (pedantic && whitespace(value.charAt(index))) { - return; - } - - while (index < length) { - prev = character; - character = value.charAt(index); - - if ( - character === marker && - value.charAt(index + 1) === marker && - (!pedantic || !whitespace(prev)) - ) { - character = value.charAt(index + 2); - - if (character !== marker) { - if (!trim(queue)) { - return; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - now = eat.now(); - now.column += 2; - now.offset += 2; - - return eat(subvalue + queue + subvalue)({ - type: 'strong', - children: self.tokenizeInline(queue, now) - }); - } - } - - if (!pedantic && character === '\\') { - queue += character; - character = value.charAt(++index); - } - - queue += character; - index++; - } -} diff --git a/node_modules/remark-parse/lib/tokenize/table.js b/node_modules/remark-parse/lib/tokenize/table.js deleted file mode 100644 index ce93b1d2..00000000 --- a/node_modules/remark-parse/lib/tokenize/table.js +++ /dev/null @@ -1,266 +0,0 @@ -'use strict'; - -var whitespace = require('is-whitespace-character'); - -module.exports = table; - -var C_BACKSLASH = '\\'; -var C_TICK = '`'; -var C_DASH = '-'; -var C_PIPE = '|'; -var C_COLON = ':'; -var C_SPACE = ' '; -var C_NEWLINE = '\n'; -var C_TAB = '\t'; - -var MIN_TABLE_COLUMNS = 1; -var MIN_TABLE_ROWS = 2; - -var TABLE_ALIGN_LEFT = 'left'; -var TABLE_ALIGN_CENTER = 'center'; -var TABLE_ALIGN_RIGHT = 'right'; -var TABLE_ALIGN_NONE = null; - -function table(eat, value, silent) { - var self = this; - var index; - var alignments; - var alignment; - var subvalue; - var row; - var length; - var lines; - var queue; - var character; - var hasDash; - var align; - var cell; - var preamble; - var count; - var opening; - var now; - var position; - var lineCount; - var line; - var rows; - var table; - var lineIndex; - var pipeIndex; - var first; - - /* Exit when not in gfm-mode. */ - if (!self.options.gfm) { - return; - } - - /* Get the rows. 
- * Detecting tables soon is hard, so there are some - * checks for performance here, such as the minimum - * number of rows, and allowed characters in the - * alignment row. */ - index = 0; - lineCount = 0; - length = value.length + 1; - lines = []; - - while (index < length) { - lineIndex = value.indexOf(C_NEWLINE, index); - pipeIndex = value.indexOf(C_PIPE, index + 1); - - if (lineIndex === -1) { - lineIndex = value.length; - } - - if (pipeIndex === -1 || pipeIndex > lineIndex) { - if (lineCount < MIN_TABLE_ROWS) { - return; - } - - break; - } - - lines.push(value.slice(index, lineIndex)); - lineCount++; - index = lineIndex + 1; - } - - /* Parse the alignment row. */ - subvalue = lines.join(C_NEWLINE); - alignments = lines.splice(1, 1)[0] || []; - index = 0; - length = alignments.length; - lineCount--; - alignment = false; - align = []; - - while (index < length) { - character = alignments.charAt(index); - - if (character === C_PIPE) { - hasDash = null; - - if (alignment === false) { - if (first === false) { - return; - } - } else { - align.push(alignment); - alignment = false; - } - - first = false; - } else if (character === C_DASH) { - hasDash = true; - alignment = alignment || TABLE_ALIGN_NONE; - } else if (character === C_COLON) { - if (alignment === TABLE_ALIGN_LEFT) { - alignment = TABLE_ALIGN_CENTER; - } else if (hasDash && alignment === TABLE_ALIGN_NONE) { - alignment = TABLE_ALIGN_RIGHT; - } else { - alignment = TABLE_ALIGN_LEFT; - } - } else if (!whitespace(character)) { - return; - } - - index++; - } - - if (alignment !== false) { - align.push(alignment); - } - - /* Exit when without enough columns. */ - if (align.length < MIN_TABLE_COLUMNS) { - return; - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - /* Parse the rows. */ - position = -1; - rows = []; - - table = eat(subvalue).reset({ - type: 'table', - align: align, - children: rows - }); - - while (++position < lineCount) { - line = lines[position]; - row = {type: 'tableRow', children: []}; - - /* Eat a newline character when this is not the - * first row. */ - if (position) { - eat(C_NEWLINE); - } - - /* Eat the row. 
*/ - eat(line).reset(row, table); - - length = line.length + 1; - index = 0; - queue = ''; - cell = ''; - preamble = true; - count = null; - opening = null; - - while (index < length) { - character = line.charAt(index); - - if (character === C_TAB || character === C_SPACE) { - if (cell) { - queue += character; - } else { - eat(character); - } - - index++; - continue; - } - - if (character === '' || character === C_PIPE) { - if (preamble) { - eat(character); - } else { - if (character && opening) { - queue += character; - index++; - continue; - } - - if ((cell || character) && !preamble) { - subvalue = cell; - - if (queue.length > 1) { - if (character) { - subvalue += queue.slice(0, queue.length - 1); - queue = queue.charAt(queue.length - 1); - } else { - subvalue += queue; - queue = ''; - } - } - - now = eat.now(); - - eat(subvalue)({ - type: 'tableCell', - children: self.tokenizeInline(cell, now) - }, row); - } - - eat(queue + character); - - queue = ''; - cell = ''; - } - } else { - if (queue) { - cell += queue; - queue = ''; - } - - cell += character; - - if (character === C_BACKSLASH && index !== length - 2) { - cell += line.charAt(index + 1); - index++; - } - - if (character === C_TICK) { - count = 1; - - while (line.charAt(index + 1) === character) { - cell += character; - index++; - count++; - } - - if (!opening) { - opening = count; - } else if (count >= opening) { - opening = 0; - } - } - } - - preamble = false; - index++; - } - - /* Eat the alignment row. */ - if (!position) { - eat(C_NEWLINE + alignments); - } - } - - return table; -} diff --git a/node_modules/remark-parse/lib/tokenize/text.js b/node_modules/remark-parse/lib/tokenize/text.js deleted file mode 100644 index 4aedfa90..00000000 --- a/node_modules/remark-parse/lib/tokenize/text.js +++ /dev/null @@ -1,58 +0,0 @@ -'use strict'; - -module.exports = text; - -function text(eat, value, silent) { - var self = this; - var methods; - var tokenizers; - var index; - var length; - var subvalue; - var position; - var tokenizer; - var name; - var min; - var now; - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - methods = self.inlineMethods; - length = methods.length; - tokenizers = self.inlineTokenizers; - index = -1; - min = value.length; - - while (++index < length) { - name = methods[index]; - - if (name === 'text' || !tokenizers[name]) { - continue; - } - - tokenizer = tokenizers[name].locator; - - if (!tokenizer) { - eat.file.fail('Missing locator: `' + name + '`'); - } - - position = tokenizer.call(self, value, 1); - - if (position !== -1 && position < min) { - min = position; - } - } - - subvalue = value.slice(0, min); - now = eat.now(); - - self.decode(subvalue, now, function (content, position, source) { - eat(source || content)({ - type: 'text', - value: content - }); - }); -} diff --git a/node_modules/remark-parse/lib/tokenize/thematic-break.js b/node_modules/remark-parse/lib/tokenize/thematic-break.js deleted file mode 100644 index 2391e3f5..00000000 --- a/node_modules/remark-parse/lib/tokenize/thematic-break.js +++ /dev/null @@ -1,70 +0,0 @@ -'use strict'; - -module.exports = thematicBreak; - -var C_NEWLINE = '\n'; -var C_TAB = '\t'; -var C_SPACE = ' '; -var C_ASTERISK = '*'; -var C_UNDERSCORE = '_'; -var C_DASH = '-'; - -var THEMATIC_BREAK_MARKER_COUNT = 3; - -function thematicBreak(eat, value, silent) { - var index = -1; - var length = value.length + 1; - var subvalue = ''; - var character; - var marker; - var markerCount; - var queue; - - while (++index < length) { - character = 
value.charAt(index); - - if (character !== C_TAB && character !== C_SPACE) { - break; - } - - subvalue += character; - } - - if ( - character !== C_ASTERISK && - character !== C_DASH && - character !== C_UNDERSCORE - ) { - return; - } - - marker = character; - subvalue += character; - markerCount = 1; - queue = ''; - - while (++index < length) { - character = value.charAt(index); - - if (character === marker) { - markerCount++; - subvalue += queue + marker; - queue = ''; - } else if (character === C_SPACE) { - queue += character; - } else if ( - markerCount >= THEMATIC_BREAK_MARKER_COUNT && - (!character || character === C_NEWLINE) - ) { - subvalue += queue; - - if (silent) { - return true; - } - - return eat(subvalue)({type: 'thematicBreak'}); - } else { - return; - } - } -} diff --git a/node_modules/remark-parse/lib/tokenize/url.js b/node_modules/remark-parse/lib/tokenize/url.js deleted file mode 100644 index 297940bf..00000000 --- a/node_modules/remark-parse/lib/tokenize/url.js +++ /dev/null @@ -1,144 +0,0 @@ -'use strict'; - -var decode = require('parse-entities'); -var whitespace = require('is-whitespace-character'); -var locate = require('../locate/url'); - -module.exports = url; -url.locator = locate; -url.notInLink = true; - -var C_BRACKET_OPEN = '['; -var C_BRACKET_CLOSE = ']'; -var C_PAREN_OPEN = '('; -var C_PAREN_CLOSE = ')'; -var C_LT = '<'; -var C_AT_SIGN = '@'; - -var HTTP_PROTOCOL = 'http://'; -var HTTPS_PROTOCOL = 'https://'; -var MAILTO_PROTOCOL = 'mailto:'; - -var PROTOCOLS = [ - HTTP_PROTOCOL, - HTTPS_PROTOCOL, - MAILTO_PROTOCOL -]; - -var PROTOCOLS_LENGTH = PROTOCOLS.length; - -function url(eat, value, silent) { - var self = this; - var subvalue; - var content; - var character; - var index; - var position; - var protocol; - var match; - var length; - var queue; - var parenCount; - var nextCharacter; - var exit; - - if (!self.options.gfm) { - return; - } - - subvalue = ''; - index = -1; - length = PROTOCOLS_LENGTH; - - while (++index < length) { - protocol = PROTOCOLS[index]; - match = value.slice(0, protocol.length); - - if (match.toLowerCase() === protocol) { - subvalue = match; - break; - } - } - - if (!subvalue) { - return; - } - - index = subvalue.length; - length = value.length; - queue = ''; - parenCount = 0; - - while (index < length) { - character = value.charAt(index); - - if (whitespace(character) || character === C_LT) { - break; - } - - if ( - character === '.' 
|| - character === ',' || - character === ':' || - character === ';' || - character === '"' || - character === '\'' || - character === ')' || - character === ']' - ) { - nextCharacter = value.charAt(index + 1); - - if (!nextCharacter || whitespace(nextCharacter)) { - break; - } - } - - if (character === C_PAREN_OPEN || character === C_BRACKET_OPEN) { - parenCount++; - } - - if (character === C_PAREN_CLOSE || character === C_BRACKET_CLOSE) { - parenCount--; - - if (parenCount < 0) { - break; - } - } - - queue += character; - index++; - } - - if (!queue) { - return; - } - - subvalue += queue; - content = subvalue; - - if (protocol === MAILTO_PROTOCOL) { - position = queue.indexOf(C_AT_SIGN); - - if (position === -1 || position === length - 1) { - return; - } - - content = content.substr(MAILTO_PROTOCOL.length); - } - - /* istanbul ignore if - never used (yet) */ - if (silent) { - return true; - } - - exit = self.enterLink(); - content = self.tokenizeInline(content, eat.now()); - exit(); - - return eat(subvalue)({ - type: 'link', - title: null, - url: decode(subvalue, {nonTerminated: false}), - children: content - }); -} diff --git a/node_modules/remark-parse/lib/tokenizer.js b/node_modules/remark-parse/lib/tokenizer.js deleted file mode 100644 index 498ef22a..00000000 --- a/node_modules/remark-parse/lib/tokenizer.js +++ /dev/null @@ -1,331 +0,0 @@ -'use strict'; - -module.exports = factory; - -var MERGEABLE_NODES = { - text: mergeText, - blockquote: mergeBlockquote -}; - -/* Check whether a node is mergeable with adjacent nodes. */ -function mergeable(node) { - var start; - var end; - - if (node.type !== 'text' || !node.position) { - return true; - } - - start = node.position.start; - end = node.position.end; - - /* Only merge nodes which occupy the same size as their - * `value`. */ - return start.line !== end.line || - end.column - start.column === node.value.length; -} - -/* Merge two text nodes: `node` into `prev`. */ -function mergeText(prev, node) { - prev.value += node.value; - - return prev; -} - -/* Merge two blockquotes: `node` into `prev`, unless in - * CommonMark mode. */ -function mergeBlockquote(prev, node) { - if (this.options.commonmark) { - return node; - } - - prev.children = prev.children.concat(node.children); - - return prev; -} - -/* Construct a tokenizer. This creates both - * `tokenizeInline` and `tokenizeBlock`. */ -function factory(type) { - return tokenize; - - /* Tokenizer for a bound `type`. */ - function tokenize(value, location) { - var self = this; - var offset = self.offset; - var tokens = []; - var methods = self[type + 'Methods']; - var tokenizers = self[type + 'Tokenizers']; - var line = location.line; - var column = location.column; - var index; - var length; - var method; - var name; - var matched; - var valueLength; - - /* Trim white space only lines. */ - if (!value) { - return tokens; - } - - /* Expose on `eat`. */ - eat.now = now; - eat.file = self.file; - - /* Sync initial offset. */ - updatePosition(''); - - /* Iterate over `value`, and iterate over all - * tokenizers. When one eats something, re-iterate - * with the remaining value. If no tokenizer eats, - * something failed (should not happen) and an - * exception is thrown. 
*/ - while (value) { - index = -1; - length = methods.length; - matched = false; - - while (++index < length) { - name = methods[index]; - method = tokenizers[name]; - - if ( - method && - /* istanbul ignore next */ (!method.onlyAtStart || self.atStart) && - (!method.notInList || !self.inList) && - (!method.notInBlock || !self.inBlock) && - (!method.notInLink || !self.inLink) - ) { - valueLength = value.length; - - method.apply(self, [eat, value]); - - matched = valueLength !== value.length; - - if (matched) { - break; - } - } - } - - /* istanbul ignore if */ - if (!matched) { - self.file.fail(new Error('Infinite loop'), eat.now()); - } - } - - self.eof = now(); - - return tokens; - - /* Update line, column, and offset based on - * `value`. */ - function updatePosition(subvalue) { - var lastIndex = -1; - var index = subvalue.indexOf('\n'); - - while (index !== -1) { - line++; - lastIndex = index; - index = subvalue.indexOf('\n', index + 1); - } - - if (lastIndex === -1) { - column += subvalue.length; - } else { - column = subvalue.length - lastIndex; - } - - if (line in offset) { - if (lastIndex !== -1) { - column += offset[line]; - } else if (column <= offset[line]) { - column = offset[line] + 1; - } - } - } - - /* Get offset. Called before the first character is - * eaten to retrieve the range's offsets. */ - function getOffset() { - var indentation = []; - var pos = line + 1; - - /* Done. Called when the last character is - * eaten to retrieve the range’s offsets. */ - return function () { - var last = line + 1; - - while (pos < last) { - indentation.push((offset[pos] || 0) + 1); - - pos++; - } - - return indentation; - }; - } - - /* Get the current position. */ - function now() { - var pos = {line: line, column: column}; - - pos.offset = self.toOffset(pos); - - return pos; - } - - /* Store position information for a node. */ - function Position(start) { - this.start = start; - this.end = now(); - } - - /* Throw when a value is incorrectly eaten. - * This shouldn’t happen but will throw on new, - * incorrect rules. */ - function validateEat(subvalue) { - /* istanbul ignore if */ - if (value.substring(0, subvalue.length) !== subvalue) { - /* Capture stack-trace. */ - self.file.fail( - new Error( - 'Incorrectly eaten value: please report this ' + - 'warning on http://git.io/vg5Ft' - ), - now() - ); - } - } - - /* Mark position and patch `node.position`. */ - function position() { - var before = now(); - - return update; - - /* Add the position to a node. */ - function update(node, indent) { - var prev = node.position; - var start = prev ? prev.start : before; - var combined = []; - var n = prev && prev.end.line; - var l = before.line; - - node.position = new Position(start); - - /* If there was already a `position`, this - * node was merged. Fixing `start` wasn’t - * hard, but the indent is different. - * Especially because some information, the - * indent between `n` and `l` wasn’t - * tracked. Luckily, that space is - * (should be?) empty, so we can safely - * check for it now. */ - if (prev && indent && prev.indent) { - combined = prev.indent; - - if (n < l) { - while (++n < l) { - combined.push((offset[n] || 0) + 1); - } - - combined.push(before.column); - } - - indent = combined.concat(indent); - } - - node.position.indent = indent || []; - - return node; - } - } - - /* Add `node` to `parent`s children or to `tokens`. - * Performs merges where possible. */ - function add(node, parent) { - var children = parent ? 
parent.children : tokens; - var prev = children[children.length - 1]; - - if ( - prev && - node.type === prev.type && - node.type in MERGEABLE_NODES && - mergeable(prev) && - mergeable(node) - ) { - node = MERGEABLE_NODES[node.type].call(self, prev, node); - } - - if (node !== prev) { - children.push(node); - } - - if (self.atStart && tokens.length !== 0) { - self.exitStart(); - } - - return node; - } - - /* Remove `subvalue` from `value`. - * `subvalue` must be at the start of `value`. */ - function eat(subvalue) { - var indent = getOffset(); - var pos = position(); - var current = now(); - - validateEat(subvalue); - - apply.reset = reset; - reset.test = test; - apply.test = test; - - value = value.substring(subvalue.length); - - updatePosition(subvalue); - - indent = indent(); - - return apply; - - /* Add the given arguments, add `position` to - * the returned node, and return the node. */ - function apply(node, parent) { - return pos(add(pos(node), parent), indent); - } - - /* Functions just like apply, but resets the - * content: the line and column are reversed, - * and the eaten value is re-added. - * This is useful for nodes with a single - * type of content, such as lists and tables. - * See `apply` above for what parameters are - * expected. */ - function reset() { - var node = apply.apply(null, arguments); - - line = current.line; - column = current.column; - value = subvalue + value; - - return node; - } - - /* Test the position, after eating, and reverse - * to a not-eaten state. */ - function test() { - var result = pos({}); - - line = current.line; - column = current.column; - value = subvalue + value; - - return result.position; - } - } - } -} diff --git a/node_modules/remark-parse/lib/unescape.js b/node_modules/remark-parse/lib/unescape.js deleted file mode 100644 index 321900e7..00000000 --- a/node_modules/remark-parse/lib/unescape.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -module.exports = factory; - -/* Factory to de-escape a value, based on a list at `key` - * in `ctx`. */ -function factory(ctx, key) { - return unescape; - - /* De-escape a string using the expression at `key` - * in `ctx`. */ - function unescape(value) { - var prev = 0; - var index = value.indexOf('\\'); - var escape = ctx[key]; - var queue = []; - var character; - - while (index !== -1) { - queue.push(value.slice(prev, index)); - prev = index + 1; - character = value.charAt(prev); - - /* If the following character is not a valid escape, - * add the slash. */ - if (!character || escape.indexOf(character) === -1) { - queue.push('\\'); - } - - index = value.indexOf('\\', prev); - } - - queue.push(value.slice(prev)); - - return queue.join(''); - } -} diff --git a/node_modules/remark-parse/lib/util/get-indentation.js b/node_modules/remark-parse/lib/util/get-indentation.js deleted file mode 100644 index 3e09e141..00000000 --- a/node_modules/remark-parse/lib/util/get-indentation.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; - -module.exports = indentation; - -/* Map of characters, and their column length, - * which can be used as indentation. */ -var characters = {' ': 1, '\t': 4}; - -/* Gets indentation information for a line. 
*/ -function indentation(value) { - var index = 0; - var indent = 0; - var character = value.charAt(index); - var stops = {}; - var size; - - while (character in characters) { - size = characters[character]; - - indent += size; - - if (size > 1) { - indent = Math.floor(indent / size) * size; - } - - stops[indent] = index; - - character = value.charAt(++index); - } - - return {indent: indent, stops: stops}; -} diff --git a/node_modules/remark-parse/lib/util/html.js b/node_modules/remark-parse/lib/util/html.js deleted file mode 100644 index 5f211f13..00000000 --- a/node_modules/remark-parse/lib/util/html.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict'; - -var attributeName = '[a-zA-Z_:][a-zA-Z0-9:._-]*'; -var unquoted = '[^"\'=<>`\\u0000-\\u0020]+'; -var singleQuoted = '\'[^\']*\''; -var doubleQuoted = '"[^"]*"'; -var attributeValue = '(?:' + unquoted + '|' + singleQuoted + '|' + doubleQuoted + ')'; -var attribute = '(?:\\s+' + attributeName + '(?:\\s*=\\s*' + attributeValue + ')?)'; -var openTag = '<[A-Za-z][A-Za-z0-9\\-]*' + attribute + '*\\s*\\/?>'; -var closeTag = '<\\/[A-Za-z][A-Za-z0-9\\-]*\\s*>'; -var comment = '<!---->|<!--(?:-?[^>-])(?:-?[^-])*-->'; -var processing = '<[?].*?[?]>'; -var declaration = '<![A-Za-z]+\\s+[^>]*>'; -var cdata = '<!\\[CDATA\\[[\\s\\S]*?\\]\\]>'; - -exports.openCloseTag = new RegExp('^(?:' + openTag + '|' + closeTag + ')'); - -exports.tag = new RegExp('^(?:' + - openTag + '|' + - closeTag + '|' + - comment + '|' + - processing + '|' + - declaration + '|' + - cdata + -')'); diff --git a/node_modules/remark-parse/lib/util/interrupt.js b/node_modules/remark-parse/lib/util/interrupt.js deleted file mode 100644 index e3178ab4..00000000 --- a/node_modules/remark-parse/lib/util/interrupt.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -module.exports = interrupt; - -function interrupt(interruptors, tokenizers, ctx, params) { - var bools = ['pedantic', 'commonmark']; - var count = bools.length; - var length = interruptors.length; - var index = -1; - var interruptor; - var config; - var fn; - var offset; - var bool; - var ignore; - - while (++index < length) { - interruptor = interruptors[index]; - config = interruptor[1] || {}; - fn = interruptor[0]; - offset = -1; - ignore = false; - - while (++offset < count) { - bool = bools[offset]; - - if (config[bool] !== undefined && config[bool] !== ctx.options[bool]) { - ignore = true; - break; - } - } - - if (ignore) { - continue; - } - - if (tokenizers[fn].apply(ctx, params)) { - return true; - } - } - - return false; -} diff --git a/node_modules/remark-parse/lib/util/normalize.js b/node_modules/remark-parse/lib/util/normalize.js deleted file mode 100644 index 846ceeec..00000000 --- a/node_modules/remark-parse/lib/util/normalize.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict'; - -var collapseWhiteSpace = require('collapse-white-space'); - -module.exports = normalize; - -/* Normalize an identifier. Collapses multiple white space - * characters into a single space, and removes casing. 
*/ -function normalize(value) { - return collapseWhiteSpace(value).toLowerCase(); -} diff --git a/node_modules/remark-parse/lib/util/remove-indentation.js b/node_modules/remark-parse/lib/util/remove-indentation.js deleted file mode 100644 index 20f18be7..00000000 --- a/node_modules/remark-parse/lib/util/remove-indentation.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict'; - -var trim = require('trim'); -var repeat = require('repeat-string'); -var getIndent = require('./get-indentation'); - -module.exports = indentation; - -var C_SPACE = ' '; -var C_NEWLINE = '\n'; -var C_TAB = '\t'; - -/* Remove the minimum indent from every line in `value`. - * Supports both tab, spaced, and mixed indentation (as - * well as possible). */ -function indentation(value, maximum) { - var values = value.split(C_NEWLINE); - var position = values.length + 1; - var minIndent = Infinity; - var matrix = []; - var index; - var indentation; - var stops; - var padding; - - values.unshift(repeat(C_SPACE, maximum) + '!'); - - while (position--) { - indentation = getIndent(values[position]); - - matrix[position] = indentation.stops; - - if (trim(values[position]).length === 0) { - continue; - } - - if (indentation.indent) { - if (indentation.indent > 0 && indentation.indent < minIndent) { - minIndent = indentation.indent; - } - } else { - minIndent = Infinity; - - break; - } - } - - if (minIndent !== Infinity) { - position = values.length; - - while (position--) { - stops = matrix[position]; - index = minIndent; - - while (index && !(index in stops)) { - index--; - } - - if ( - trim(values[position]).length !== 0 && - minIndent && - index !== minIndent - ) { - padding = C_TAB; - } else { - padding = ''; - } - - values[position] = padding + values[position].slice( - index in stops ? 
stops[index] + 1 : 0 - ); - } - } - - values.shift(); - - return values.join(C_NEWLINE); -} diff --git a/node_modules/remark-parse/package.json b/node_modules/remark-parse/package.json index 822420cb..3b9c19d8 100644 --- a/node_modules/remark-parse/package.json +++ b/node_modules/remark-parse/package.json @@ -1,44 +1,46 @@ { "name": "remark-parse", - "version": "5.0.0", - "description": "Markdown parser for remark", + "version": "9.0.0", + "description": "remark plugin to parse Markdown", "license": "MIT", "keywords": [ + "unified", + "remark", + "remark-plugin", + "plugin", "markdown", + "mdast", "abstract", "syntax", "tree", "ast", "parse" ], - "homepage": "http://remark.js.org", - "repository": "https://github.com/remarkjs/remark/tree/master/packages/remark-parse", + "types": "types/index.d.ts", + "homepage": "https://remark.js.org", + "repository": "https://github.com/remarkjs/remark/tree/main/packages/remark-parse", "bugs": "https://github.com/remarkjs/remark/issues", - "author": "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)", - "Eugene Sharygin <eush77@gmail.com>" + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", + "Eugene Sharygin <eush77@gmail.com>", + "Junyoung Choi <fluke8259@gmail.com>", + "Elijah Hamovitz <elijahhamovitz@gmail.com>", + "Ika <ikatyang@gmail.com>" ], "files": [ "index.js", - "lib" + "types/index.d.ts" ], "dependencies": { - "collapse-white-space": "^1.0.2", - "is-alphabetical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-whitespace-character": "^1.0.0", - "is-word-character": "^1.0.0", - "markdown-escapes": "^1.0.0", - "parse-entities": "^1.1.0", - "repeat-string": "^1.5.4", - "state-toggle": "^1.0.0", - "trim": "0.0.1", - "trim-trailing-lines": "^1.0.0", - "unherit": "^1.0.4", - "unist-util-remove-position": "^1.0.0", - "vfile-location": "^2.0.0", - "xtend": "^4.0.1" + "mdast-util-from-markdown": "^0.8.0" + }, + "scripts": { + "test": "tape test.js" }, "xo": false } diff --git a/node_modules/remark-parse/readme.md b/node_modules/remark-parse/readme.md index ecaa6c09..13f4501f 100644 --- a/node_modules/remark-parse/readme.md +++ b/node_modules/remark-parse/readme.md @@ -1,11 +1,21 @@ -# remark-parse [![Build Status][build-badge]][build-status] [![Coverage Status][coverage-badge]][coverage-status] [![Chat][chat-badge]][chat] +# remark-parse -[Parser][] for [**unified**][unified]. Parses markdown to an -[**MDAST**][mdast] syntax tree. Used in the [**remark** -processor][processor]. Can be [extended][extend] to change how -markdown is parsed. +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] -## Installation +[Parser][] for [**unified**][unified]. +Parses Markdown to [**mdast**][mdast] syntax trees. +Built on [`micromark`][micromark] and +[`mdast-util-from-markdown`][from-markdown]. +Used in the [**remark** processor][remark] but can be used on its own as well. +Can be [extended][extend] to change how Markdown is parsed. + +## Install [npm][]: @@ -13,441 +23,175 @@ markdown is parsed. 
npm install remark-parse ``` -## Usage - -```js -var unified = require('unified'); -var createStream = require('unified-stream'); -var markdown = require('remark-parse'); -var html = require('remark-html'); - -var processor = unified() - .use(markdown, {commonmark: true}) - .use(html) - -process.stdin - .pipe(createStream(processor)) - .pipe(process.stdout); -``` - -## Table of Contents - -* [API](#api) - * [processor.use(parse\[, options\])](#processoruseparse-options) - * [parse.Parser](#parseparser) -* [Extending the Parser](#extending-the-parser) - * [Parser#blockTokenizers](#parserblocktokenizers) - * [Parser#blockMethods](#parserblockmethods) - * [Parser#inlineTokenizers](#parserinlinetokenizers) - * [Parser#inlineMethods](#parserinlinemethods) - * [function tokenizer(eat, value, silent)](#function-tokenizereat-value-silent) - * [tokenizer.locator(value, fromIndex)](#tokenizerlocatorvalue-fromindex) - * [eat(subvalue)](#eatsubvalue) - * [add(node\[, parent\])](#addnode-parent) - * [add.test()](#addtest) - * [add.reset(node\[, parent\])](#addresetnode-parent) - * [Turning off a tokenizer](#turning-off-a-tokenizer) -* [License](#license) - -## API - -### `processor.use(parse[, options])` - -Configure the `processor` to read markdown as input and process an -[**MDAST**][mdast] syntax tree. - -##### `options` - -Options are passed directly, or passed later through [`processor.data()`][data]. - -##### `options.gfm` - -```md -hello ~~hi~~ world -``` - -GFM mode (`boolean`, default: `true`) turns on: - -* [Fenced code blocks](https://help.github.com/articles/github-flavored-markdown/#fenced-code-blocks) -* [Autolinking of URLs](https://help.github.com/articles/github-flavored-markdown/#url-autolinking) -* [Deletions (strikethrough)](https://help.github.com/articles/github-flavored-markdown/#strikethrough) -* [Task lists](https://help.github.com/articles/writing-on-github/#task-lists) -* [Tables](https://help.github.com/articles/github-flavored-markdown/#tables) - -##### `options.commonmark` - -```md -This is a paragraph - and this is also part of the preceding paragraph. -``` - -CommonMark mode (`boolean`, default: `false`) allows: - -* Empty lines to split blockquotes -* Parentheses (`(` and `)`) around for link and image titles -* Any escaped [ASCII-punctuation][escapes] character -* Closing parenthesis (`)`) as an ordered list marker -* URL definitions (and footnotes, when enabled) in blockquotes - -CommonMark mode disallows: - -* Code directly following a paragraph -* ATX-headings (`# Hash headings`) without spacing after opening hashes - or and before closing hashes -* Setext headings (`Underline headings\n---`) when following a paragraph -* Newlines in link and image titles -* White space in link and image URLs in auto-links (links in brackets, - `<` and `>`) -* Lazy blockquote continuation, lines not preceded by a closing angle - bracket (`>`), for lists, code, and thematicBreak - -##### `options.footnotes` - -```md -Something something[^or something?]. - -And something else[^1]. - -[^1]: This reference footnote contains a paragraph... - - * ...and a list -``` - -Footnotes mode (`boolean`, default: `false`) enables reference footnotes and -inline footnotes. Both are wrapped in square brackets and preceded by a caret -(`^`), and can be referenced from inside other footnotes. - -##### `options.blocks` - -```md -<block>foo -</block> -``` - -Blocks (`Array.<string>`, default: list of [block HTML elements][blocks]) -exposes let’s users define block-level HTML elements. 
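The individual options above can be combined in a single configuration object passed to `.use()`. A minimal sketch (the option values and the sample document are illustrative only, not defaults):

```js
var unified = require('unified');
var markdown = require('remark-parse');

// Combine several of the options documented above (values are illustrative).
var processor = unified().use(markdown, {
  gfm: true,        // autolinked URLs, strikethrough, task lists, tables
  commonmark: false,
  footnotes: true,  // reference and inline footnotes
  pedantic: false   // see `options.pedantic` below
});

// Parse a small document into an MDAST tree.
var tree = processor.parse('hello ~~hi~~ world[^note]\n\n[^note]: a footnote');

console.log(tree.children[0].type); // => 'paragraph'
```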
- -##### `options.pedantic` - -```md -Check out some_file_name.txt -``` - -Pedantic mode (`boolean`, default: `false`) turns on: - -* Emphasis (`_alpha_`) and importance (`__bravo__`) with underscores - in words -* Unordered lists with different markers (`*`, `-`, `+`) -* If `commonmark` is also turned on, ordered lists with different - markers (`.`, `)`) -* And pedantic mode removes less spaces in list-items (at most four, - instead of the whole indent) - -### `parse.Parser` - -Access to the [parser][], if you need it. - -## Extending the Parser - -Most often, using transformers to manipulate a syntax tree produces -the desired output. Sometimes, mainly when introducing new syntactic -entities with a certain level of precedence, interfacing with the parser -is necessary. - -If the `remark-parse` plugin is used, it adds a [`Parser`][parser] constructor -to the `processor`. Other plugins can add tokenizers to the parser’s prototype -to change how markdown is parsed. - -The below plugin adds a [tokenizer][] for at-mentions. +## Use ```js -module.exports = mentions; +var unified = require('unified') +var createStream = require('unified-stream') +var markdown = require('remark-parse') +var remark2rehype = require('remark-rehype') +var html = require('rehype-stringify') -function mentions() { - var Parser = this.Parser; - var tokenizers = Parser.prototype.inlineTokenizers; - var methods = Parser.prototype.inlineMethods; +var processor = unified().use(markdown).use(remark2rehype).use(html) - /* Add an inline tokenizer (defined in the following example). */ - tokenizers.mention = tokenizeMention; - - /* Run it just before `text`. */ - methods.splice(methods.indexOf('text'), 0, 'mention'); -} +process.stdin.pipe(createStream(processor)).pipe(process.stdout) ``` -### `Parser#blockTokenizers` - -An object mapping tokenizer names to [tokenizer][]s. These -tokenizers (for example: `fencedCode`, `table`, and `paragraph`) eat -from the start of a value to a line ending. - -See `#blockMethods` below for a list of methods that are included by -default. - -### `Parser#blockMethods` - -Array of `blockTokenizers` names (`string`) specifying the order in -which they run. - -<!--methods-block start--> - -* `newline` -* `indentedCode` -* `fencedCode` -* `blockquote` -* `atxHeading` -* `thematicBreak` -* `list` -* `setextHeading` -* `html` -* `footnote` -* `definition` -* `table` -* `paragraph` - -<!--methods-block end--> - -### `Parser#inlineTokenizers` - -An object mapping tokenizer names to [tokenizer][]s. These tokenizers -(for example: `url`, `reference`, and `emphasis`) eat from the start -of a value. To increase performance, they depend on [locator][]s. - -See `#inlineMethods` below for a list of methods that are included by -default. - -### `Parser#inlineMethods` - -Array of `inlineTokenizers` names (`string`) specifying the order in -which they run. 
- -<!--methods-inline start--> - -* `escape` -* `autoLink` -* `url` -* `html` -* `link` -* `reference` -* `strong` -* `emphasis` -* `deletion` -* `code` -* `break` -* `text` +[See **unified** for more examples »][unified] -<!--methods-inline end--> - -### `function tokenizer(eat, value, silent)` - -```js -tokenizeMention.notInLink = true; -tokenizeMention.locator = locateMention; - -function tokenizeMention(eat, value, silent) { - var match = /^@(\w+)/.exec(value); - - if (match) { - if (silent) { - return true; - } - - return eat(match[0])({ - type: 'link', - url: 'https://social-network/' + match[1], - children: [{type: 'text', value: match[0]}] - }); - } -} -``` - -The parser knows two types of tokenizers: block level and inline level. -Block level tokenizers are the same as inline level tokenizers, with -the exception that the latter must have a [locator][]. - -Tokenizers _test_ whether a document starts with a certain syntactic -entity. In _silent_ mode, they return whether that test passes. -In _normal_ mode, they consume that token, a process which is called -“eating”. Locators enable tokenizers to function faster by providing -information on where the next entity may occur. - -###### Signatures - -* `Node? = tokenizer(eat, value)` -* `boolean? = tokenizer(eat, value, silent)` - -###### Parameters - -* `eat` ([`Function`][eat]) — Eat, when applicable, an entity -* `value` (`string`) — Value which may start an entity -* `silent` (`boolean`, optional) — Whether to detect or consume - -###### Properties - -* `locator` ([`Function`][locator]) - — Required for inline tokenizers -* `onlyAtStart` (`boolean`) - — Whether nodes can only be found at the beginning of the document -* `notInBlock` (`boolean`) - — Whether nodes cannot be in blockquotes, lists, or footnote - definitions -* `notInList` (`boolean`) - — Whether nodes cannot be in lists -* `notInLink` (`boolean`) - — Whether nodes cannot be in links - -###### Returns - -* In _silent_ mode, whether a node can be found at the start of `value` -* In _normal_ mode, a node if it can be found at the start of `value` - -### `tokenizer.locator(value, fromIndex)` - -```js -function locateMention(value, fromIndex) { - return value.indexOf('@', fromIndex); -} -``` - -Locators are required for inline tokenization to keep the process -performant. Locators enable inline tokenizers to function faster by -providing information on the where the next entity occurs. Locators -may be wrong, it’s OK if there actually isn’t a node to be found at -the index they return, but they must skip any nodes. - -###### Parameters - -* `value` (`string`) — Value which may contain an entity -* `fromIndex` (`number`) — Position to start searching at - -###### Returns - -Index at which an entity may start, and `-1` otherwise. - -### `eat(subvalue)` - -```js -var add = eat('foo'); -``` - -Eat `subvalue`, which is a string at the start of the -[tokenize][tokenizer]d `value` (it’s tracked to ensure the correct -value is eaten). - -###### Parameters - -* `subvalue` (`string`) - Value to eat. - -###### Returns - -[`add`][add]. - -### `add(node[, parent])` - -```js -var add = eat('foo'); -add({type: 'text', value: 'foo'}); -``` - -Add [positional information][location] to `node` and add it to `parent`. - -###### Parameters - -* `node` ([`Node`][node]) - Node to patch position on and insert -* `parent` ([`Node`][node], optional) - Place to add `node` to in - the syntax tree. Defaults to the currently processed node - -###### Returns - -The given `node`. 
+## API -### `add.test()` +[See **unified** for API docs »][unified] + +### `processor().use(parse)` + +Configure the `processor` to read Markdown as input and process +[**mdast**][mdast] syntax trees. + +## Extending the parser + +See [`micromark`][micromark] and [`mdast-util-from-markdown`][from-markdown]. +Then create a wrapper plugin such as [`remark-gfm`][gfm]. + +## Security + +As Markdown is sometimes used for HTML, and improper use of HTML can open you up +to a [cross-site scripting (XSS)][xss] attack, use of remark can also be unsafe. +When going to HTML, use remark in combination with the [**rehype**][rehype] +ecosystem, and use [`rehype-sanitize`][sanitize] to make the tree safe. + +Use of remark plugins could also open you up to other attacks. +Carefully assess each plugin and the risks involved in using them. + +## Contribute + +See [`contributing.md`][contributing] in [`remarkjs/.github`][health] for ways +to get started. +See [`support.md`][support] for ways to get help. +Ideas for new plugins and tools can be posted in [`remarkjs/ideas`][ideas]. + +A curated list of awesome remark resources can be found in [**awesome +remark**][awesome]. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## Sponsor + +Support this effort and give back by sponsoring on [OpenCollective][collective]! + +<!--lint ignore no-html--> + +<table> +<tr valign="middle"> +<td width="20%" align="center" colspan="2"> + <a href="https://www.gatsbyjs.org">Gatsby</a> 🥇<br><br> + <a href="https://www.gatsbyjs.org"><img src="https://avatars1.githubusercontent.com/u/12551863?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://vercel.com">Vercel</a> 🥇<br><br> + <a href="https://vercel.com"><img src="https://avatars1.githubusercontent.com/u/14985020?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://www.netlify.com">Netlify</a><br><br> + <!--OC has a sharper image--> + <a href="https://www.netlify.com"><img src="https://images.opencollective.com/netlify/4087de2/logo/256.png" width="128"></a> +</td> +<td width="10%" align="center"> + <a href="https://www.holloway.com">Holloway</a><br><br> + <a href="https://www.holloway.com"><img src="https://avatars1.githubusercontent.com/u/35904294?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://themeisle.com">ThemeIsle</a><br><br> + <a href="https://themeisle.com"><img src="https://avatars1.githubusercontent.com/u/58979018?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://boosthub.io">Boost Hub</a><br><br> + <a href="https://boosthub.io"><img src="https://images.opencollective.com/boosthub/6318083/logo/128.png" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://expo.io">Expo</a><br><br> + <a href="https://expo.io"><img src="https://avatars1.githubusercontent.com/u/12504344?s=128&v=4" width="64"></a> +</td> +</tr> +<tr valign="middle"> +<td width="100%" align="center" colspan="10"> + <br> + <a href="https://opencollective.com/unified"><strong>You?</strong></a> + <br><br> +</td> +</tr> +</table> -Get the [positional information][location] which would be patched on -`node` by `add`. +## License -###### Returns +[MIT][license] © [Titus Wormer][author] -[`Location`][location]. 
+<!-- Definitions --> -### `add.reset(node[, parent])` +[build-badge]: https://img.shields.io/travis/remarkjs/remark.svg -`add`, but resets the internal location. Useful for example in -lists, where the same content is first eaten for a list, and later -for list items +[build]: https://travis-ci.org/remarkjs/remark -###### Parameters +[coverage-badge]: https://img.shields.io/codecov/c/github/remarkjs/remark.svg -* `node` ([`Node`][node]) - Node to patch position on and insert -* `parent` ([`Node`][node], optional) - Place to add `node` to in - the syntax tree. Defaults to the currently processed node +[coverage]: https://codecov.io/github/remarkjs/remark -###### Returns +[downloads-badge]: https://img.shields.io/npm/dm/remark-parse.svg -The given `node`. +[downloads]: https://www.npmjs.com/package/remark-parse -### Turning off a tokenizer +[size-badge]: https://img.shields.io/bundlephobia/minzip/remark-parse.svg -In rare situations, you may want to turn off a tokenizer to avoid parsing -that syntactic feature. This can be done by deleting the tokenizer from -your Parser’s `blockTokenizers` (or `blockMethods`) or `inlineTokenizers` -(or `inlineMethods`). +[size]: https://bundlephobia.com/result?p=remark-parse -The following example turns off indented code blocks: +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg -```js -delete remarkParse.Parser.prototype.blockTokenizers.indentedCode; -``` +[backers-badge]: https://opencollective.com/unified/backers/badge.svg -## License +[collective]: https://opencollective.com/unified -[MIT][license] © [Titus Wormer][author] +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg -<!-- Definitions --> +[chat]: https://github.com/remarkjs/remark/discussions -[build-badge]: https://img.shields.io/travis/remarkjs/remark.svg +[health]: https://github.com/remarkjs/.github -[build-status]: https://travis-ci.org/remarkjs/remark +[contributing]: https://github.com/remarkjs/.github/blob/HEAD/contributing.md -[coverage-badge]: https://img.shields.io/codecov/c/github/remarkjs/remark.svg +[support]: https://github.com/remarkjs/.github/blob/HEAD/support.md -[coverage-status]: https://codecov.io/github/remarkjs/remark +[coc]: https://github.com/remarkjs/.github/blob/HEAD/code-of-conduct.md -[chat-badge]: https://img.shields.io/gitter/room/remarkjs/Lobby.svg +[ideas]: https://github.com/remarkjs/ideas -[chat]: https://gitter.im/remarkjs/Lobby +[awesome]: https://github.com/remarkjs/awesome-remark -[license]: https://github.com/remarkjs/remark/blob/master/LICENSE +[license]: https://github.com/remarkjs/remark/blob/main/license -[author]: http://wooorm.com +[author]: https://wooorm.com [npm]: https://docs.npmjs.com/cli/install [unified]: https://github.com/unifiedjs/unified -[data]: https://github.com/unifiedjs/unified#processordatakey-value - -[processor]: https://github.com/unifiedjs/remark/blob/master/packages/remark +[remark]: https://github.com/remarkjs/remark/tree/main/packages/remark [mdast]: https://github.com/syntax-tree/mdast -[escapes]: http://spec.commonmark.org/0.25/#backslash-escapes - -[node]: https://github.com/syntax-tree/unist#node - -[location]: https://github.com/syntax-tree/unist#location - [parser]: https://github.com/unifiedjs/unified#processorparser [extend]: #extending-the-parser -[tokenizer]: #function-tokenizereat-value-silent +[xss]: https://en.wikipedia.org/wiki/Cross-site_scripting + +[rehype]: https://github.com/rehypejs/rehype -[locator]: #tokenizerlocatorvalue-fromindex +[sanitize]: 
https://github.com/rehypejs/rehype-sanitize -[eat]: #eatsubvalue +[micromark]: https://github.com/micromark/micromark -[add]: #addnode-parent +[from-markdown]: https://github.com/syntax-tree/mdast-util-from-markdown -[blocks]: https://github.com/remarkjs/remark/blob/master/packages/remark-parse/lib/block-elements.json +[gfm]: https://github.com/remarkjs/remark-gfm diff --git a/node_modules/remark-parse/types/index.d.ts b/node_modules/remark-parse/types/index.d.ts new file mode 100644 index 00000000..5460e207 --- /dev/null +++ b/node_modules/remark-parse/types/index.d.ts @@ -0,0 +1,14 @@ +// TypeScript Version: 3.0 + +import {Plugin} from 'unified' +import {Options} from 'mdast-util-from-markdown' + +declare namespace remarkParse { + interface Parse extends Plugin<[RemarkParseOptions?]> {} + + type RemarkParseOptions = Options +} + +declare const remarkParse: remarkParse.Parse + +export = remarkParse diff --git a/node_modules/replace-ext/LICENSE b/node_modules/replace-ext/LICENSE deleted file mode 100755 index fd38d693..00000000 --- a/node_modules/replace-ext/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Blaine Bublitz <blaine.bublitz@gmail.com>, Eric Schoffstall <yo@contra.io> and other contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/replace-ext/README.md b/node_modules/replace-ext/README.md deleted file mode 100644 index 8775983b..00000000 --- a/node_modules/replace-ext/README.md +++ /dev/null @@ -1,50 +0,0 @@ -<p align="center"> - <a href="http://gulpjs.com"> - <img height="257" width="114" src="https://raw.githubusercontent.com/gulpjs/artwork/master/gulp-2x.png"> - </a> -</p> - -# replace-ext - -[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] - -Replaces a file extension with another one. - -## Usage - -```js -var replaceExt = require('replace-ext'); - -var path = '/some/dir/file.js'; -var newPath = replaceExt(path, '.coffee'); - -console.log(newPath); // /some/dir/file.coffee -``` - -## API - -### `replaceExt(path, extension)` - -Replaces the extension from `path` with `extension` and returns the updated path string. - -Does not replace the extension if `path` is not a string or is empty. 
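Both guard clauses matter in practice: a non-string or empty `path` is returned untouched rather than throwing. A minimal sketch of that behaviour, based on the `index.js` further down:

```js
var replaceExt = require('replace-ext');

console.log(replaceExt('/some/dir/file.js', '.coffee')); // /some/dir/file.coffee
console.log(replaceExt('', '.coffee'));                  // '' (empty paths are returned as-is)
console.log(replaceExt(null, '.coffee'));                // null (non-strings are returned as-is)
```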
- -## License - -MIT - -[downloads-image]: http://img.shields.io/npm/dm/replace-ext.svg -[npm-url]: https://www.npmjs.com/package/replace-ext -[npm-image]: http://img.shields.io/npm/v/replace-ext.svg - -[travis-url]: https://travis-ci.org/gulpjs/replace-ext -[travis-image]: http://img.shields.io/travis/gulpjs/replace-ext.svg?label=travis-ci - -[appveyor-url]: https://ci.appveyor.com/project/gulpjs/replace-ext -[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/replace-ext.svg?label=appveyor - -[coveralls-url]: https://coveralls.io/r/gulpjs/replace-ext -[coveralls-image]: http://img.shields.io/coveralls/gulpjs/replace-ext/master.svg - -[gitter-url]: https://gitter.im/gulpjs/gulp -[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/replace-ext/index.js b/node_modules/replace-ext/index.js deleted file mode 100644 index 7cb7789e..00000000 --- a/node_modules/replace-ext/index.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict'; - -var path = require('path'); - -function replaceExt(npath, ext) { - if (typeof npath !== 'string') { - return npath; - } - - if (npath.length === 0) { - return npath; - } - - var nFileName = path.basename(npath, path.extname(npath)) + ext; - return path.join(path.dirname(npath), nFileName); -} - -module.exports = replaceExt; diff --git a/node_modules/replace-ext/package.json b/node_modules/replace-ext/package.json deleted file mode 100644 index 27dbe310..00000000 --- a/node_modules/replace-ext/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "replace-ext", - "version": "1.0.0", - "description": "Replaces a file extension with another one", - "author": "Gulp Team <team@gulpjs.com> (http://gulpjs.com/)", - "contributors": [ - "Eric Schoffstall <yo@contra.io>", - "Blaine Bublitz <blaine.bublitz@gmail.com>" - ], - "repository": "gulpjs/replace-ext", - "license": "MIT", - "engines": { - "node": ">= 0.10" - }, - "main": "index.js", - "files": [ - "LICENSE", - "index.js" - ], - "scripts": { - "lint": "eslint . && jscs index.js test/", - "pretest": "npm run lint", - "test": "mocha --async-only", - "cover": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly", - "coveralls": "npm run cover && istanbul-coveralls" - }, - "dependencies": {}, - "devDependencies": { - "eslint": "^1.10.3", - "eslint-config-gulp": "^2.0.0", - "expect": "^1.16.0", - "istanbul": "^0.4.3", - "istanbul-coveralls": "^1.0.3", - "jscs": "^2.3.5", - "jscs-preset-gulp": "^1.0.0", - "mocha": "^2.4.5" - }, - "keywords": [ - "gulp", - "extensions", - "filepath", - "basename" - ] -} diff --git a/node_modules/state-toggle/index.js b/node_modules/state-toggle/index.js deleted file mode 100644 index aceee00d..00000000 --- a/node_modules/state-toggle/index.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -module.exports = factory - -// Construct a state `toggler`: a function which inverses `property` in context -// based on its current value. -// The by `toggler` returned function restores that value. 
-function factory(key, state, ctx) { - return enter - - function enter() { - var context = ctx || this - var current = context[key] - - context[key] = !state - - return exit - - function exit() { - context[key] = current - } - } -} diff --git a/node_modules/state-toggle/readme.md b/node_modules/state-toggle/readme.md deleted file mode 100644 index 9fcca1e5..00000000 --- a/node_modules/state-toggle/readme.md +++ /dev/null @@ -1,95 +0,0 @@ -# state-toggle - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -Enter/exit a state. - -## Install - -[npm][]: - -```sh -npm install state-toggle -``` - -## Use - -```js -var toggle = require('state-toggle') - -var ctx = {on: false} -var enter = toggle('on', ctx.on, ctx) -var exit - -// Entering: -exit = enter() -console.log(ctx.on) // => true - -// Exiting: -exit() -console.log(ctx.on) // => false -``` - -## API - -### `toggle(key, initial[, ctx])` - -Create a toggle, which when entering toggles `key` on `ctx` (or `this`, if `ctx` -is not given) to `!initial`, and when exiting, sets `key` on the context back to -the value it had before entering. - -###### Returns - -`Function` — [`enter`][enter]. - -### `enter()` - -Enter the state. - -###### Context - -If no `ctx` was given to `toggle`, the context object (`this`) of `enter()` is -used to toggle. - -###### Returns - -`Function` — [`exit`][exit]. - -### `exit()` - -Exit the state, reverting `key` to the value it had before entering. - -## License - -[MIT][license] © [Titus Wormer][author] - -<!-- Definitions --> - -[build-badge]: https://img.shields.io/travis/wooorm/state-toggle.svg - -[build]: https://travis-ci.org/wooorm/state-toggle - -[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/state-toggle.svg - -[coverage]: https://codecov.io/github/wooorm/state-toggle - -[downloads-badge]: https://img.shields.io/npm/dm/state-toggle.svg - -[downloads]: https://www.npmjs.com/package/state-toggle - -[size-badge]: https://img.shields.io/bundlephobia/minzip/state-toggle.svg - -[size]: https://bundlephobia.com/result?p=state-toggle - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com - -[enter]: #enter - -[exit]: #exit diff --git a/node_modules/structured-source/README.md b/node_modules/structured-source/README.md deleted file mode 100644 index 0b4f56d7..00000000 --- a/node_modules/structured-source/README.md +++ /dev/null @@ -1,66 +0,0 @@ -StructuredSource -============== - - -## About - -Provides StructuredSource and functionality for converting range and loc vice versa. 
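The usage example below exercises these conversions; note that, per the comments in `lib/structured-source.js` later on, line numbers are 1-based while column numbers are 0-based. A small sketch under that assumption, using Node's built-in `assert` and an illustrative two-line source string:

```js
const assert = require('assert');
const StructuredSource = require('structured-source');

// Lines are 1-based, columns are 0-based.
const src = new StructuredSource('abc\ndef\n');

assert(src.positionToIndex({ line: 2, column: 1 }) === 5); // index of 'e'
assert.deepEqual(src.indexToPosition(5), { line: 2, column: 1 });
```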
- -## Installation - -```sh -npm install structured-source -``` - - -## Usage - -```js -const StructuredSource = require('structured-source'); - -let src = new StructuredSource('aaa\u2028aaaa\u2029aaaaa\n'); - -// positionToIndex({ line: number, column: number) -> number -assert(src.positionToIndex({ line: 1, column: 2 }) === 2); - -// indexToPosition(number) -> { line: number, column: number } -assert.deepEqual(src.indexToPosition(2), { line: 1, column: 2 }); - -// rangeToLocation([ number, number ]) -> { start: { line: number, column: number}, end: { line: number, column: number } } -assert.deepEqual(src.rangeToLocation([0, 2]), { - start: { line: 1, column: 0 }, - end: { line: 1, column: 2 } -}); - -// locationToRange({ start: { line: number, column: number}, end: { line: number, column: number } }) -> [ number, number ] -assert.deepEqual(src.locationToRange({ - start: { line: 1, column: 0 }, - end: { line: 1, column: 2 } -}), [0, 2]); -``` - -### License - -Copyright (C) 2012-2014 [Yusuke Suzuki](http://github.com/Constellation) - (twitter: [@Constellation](http://twitter.com/Constellation)) and other contributors. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/structured-source/lib/index.js b/node_modules/structured-source/lib/index.js deleted file mode 100644 index f93b69c5..00000000 --- a/node_modules/structured-source/lib/index.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; - -var StructuredSource = require('./structured-source.js')["default"]; - - -module.exports = StructuredSource; - -/* vim: set sw=4 ts=4 et tw=80 : */ -//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbImluZGV4LmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7O0lBd0JPLGdCQUFnQjs7O0FBRXZCLE1BQU0sQ0FBQyxPQUFPLEdBQUcsZ0JBQWdCLENBQUMiLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VzQ29udGVudCI6WyIvKlxuICBDb3B5cmlnaHQgKEMpIDIwMTQgWXVzdWtlIFN1enVraSA8dXRhdGFuZS50ZWFAZ21haWwuY29tPlxuXG4gIFJlZGlzdHJpYnV0aW9uIGFuZCB1c2UgaW4gc291cmNlIGFuZCBiaW5hcnkgZm9ybXMsIHdpdGggb3Igd2l0aG91dFxuICBtb2RpZmljYXRpb24sIGFyZSBwZXJtaXR0ZWQgcHJvdmlkZWQgdGhhdCB0aGUgZm9sbG93aW5nIGNvbmRpdGlvbnMgYXJlIG1ldDpcblxuICAgICogUmVkaXN0cmlidXRpb25zIG9mIHNvdXJjZSBjb2RlIG11c3QgcmV0YWluIHRoZSBhYm92ZSBjb3B5cmlnaHRcbiAgICAgIG5vdGljZSwgdGhpcyBsaXN0IG9mIGNvbmRpdGlvbnMgYW5kIHRoZSBmb2xsb3dpbmcgZGlzY2xhaW1lci5cbiAgICAqIFJlZGlzdHJpYnV0aW9ucyBpbiBiaW5hcnkgZm9ybSBtdXN0IHJlcHJvZHVjZSB0aGUgYWJvdmUgY29weXJpZ2h0XG4gICAgICBub3RpY2UsIHRoaXMgbGlzdCBvZiBjb25kaXRpb25zIGFuZCB0aGUgZm9sbG93aW5nIGRpc2NsYWltZXIgaW4gdGhlXG4gICAgICBkb2N1bWVudGF0aW9uIGFuZC9vciBvdGhlciBtYXRlcmlhbHMgcHJvdmlkZWQgd2l0aCB0aGUgZGlzdHJpYnV0aW9uLlxuXG4gIFRISVMgU09GVFdBUkUgSVMgUFJPVklERUQgQlkgVEhFIENPUFlSSUdIVCBIT0xERVJTIEFORCBDT05UUklCVVRPUlMgXCJBUyBJU1wiXG4gIEFORCBBTlkgRVhQUkVTUyBPUiBJTVBMSUVEIFdBUlJBTlRJRVMsIElOQ0xVRElORywgQlVUIE5PVCBMSU1JVEVEIFRPLCBUSEVcbiAgSU1QTElFRCBXQVJSQU5USUVTIE9GIE1FUkNIQU5UQUJJTElUWSBBTkQgRklUTkVTUyBGT1IgQSBQQVJUSUNVTEFSIFBVUlBPU0VcbiAgQVJFIERJU0NMQUlNRUQuIElOIE5PIEVWRU5UIFNIQUxMIDxDT1BZUklHSFQgSE9MREVSPiBCRSBMSUFCTEUgRk9SIEFOWVxuICBESVJFQ1QsIElORElSRUNULCBJTkNJREVOVEFMLCBTUEVDSUFMLCBFWEVNUExBUlksIE9SIENPTlNFUVVFTlRJQUwgREFNQUdFU1xuICAoSU5DTFVESU5HLCBCVVQgTk9UIExJTUlURUQgVE8sIFBST0NVUkVNRU5UIE9GIFNVQlNUSVRVVEUgR09PRFMgT1IgU0VSVklDRVM7XG4gIExPU1MgT0YgVVNFLCBEQVRBLCBPUiBQUk9GSVRTOyBPUiBCVVNJTkVTUyBJTlRFUlJVUFRJT04pIEhPV0VWRVIgQ0FVU0VEIEFORFxuICBPTiBBTlkgVEhFT1JZIE9GIExJQUJJTElUWSwgV0hFVEhFUiBJTiBDT05UUkFDVCwgU1RSSUNUIExJQUJJTElUWSwgT1IgVE9SVFxuICAoSU5DTFVESU5HIE5FR0xJR0VOQ0UgT1IgT1RIRVJXSVNFKSBBUklTSU5HIElOIEFOWSBXQVkgT1VUIE9GIFRIRSBVU0UgT0ZcbiAgVEhJUyBTT0ZUV0FSRSwgRVZFTiBJRiBBRFZJU0VEIE9GIFRIRSBQT1NTSUJJTElUWSBPRiBTVUNIIERBTUFHRS5cbiovXG5cbmltcG9ydCBTdHJ1Y3R1cmVkU291cmNlIGZyb20gJy4vc3RydWN0dXJlZC1zb3VyY2UuanMnXG5cbm1vZHVsZS5leHBvcnRzID0gU3RydWN0dXJlZFNvdXJjZTtcblxuLyogdmltOiBzZXQgc3c9NCB0cz00IGV0IHR3PTgwIDogKi9cbiJdLCJzb3VyY2VSb290IjoiL3NvdXJjZS8ifQ== \ No newline at end of file diff --git a/node_modules/structured-source/lib/structured-source.js b/node_modules/structured-source/lib/structured-source.js deleted file mode 100644 index e0dbd86b..00000000 --- a/node_modules/structured-source/lib/structured-source.js +++ /dev/null @@ -1,84 +0,0 @@ -"use strict"; - -var _classProps = function (child, staticProps, instanceProps) { - if (staticProps) Object.defineProperties(child, staticProps); - if (instanceProps) Object.defineProperties(child.prototype, instanceProps); -}; - -var upperBound = require('boundary').upperBound; -var Position = function Position(line, column) { - this.line = line; - this.column = column; -}; - -exports.Position = Position; -var SourceLocation = function SourceLocation(start, end) { - this.start = start; - this.end = end; -}; - -exports.SourceLocation = SourceLocation; -var 
StructuredSource = (function () { - var StructuredSource = - /** - * @constructs StructuredSource - * @param {string} source - source code text. - */ - function StructuredSource(source) { - this.indice = [0]; - var regexp = /[\r\n\u2028\u2029]/g; - var length = source.length; - regexp.lastIndex = 0; - while (true) { - var result = regexp.exec(source); - if (!result) { - break; - } - var index = result.index; - if (source.charCodeAt(index) === 13 /* '\r' */ && source.charCodeAt(index + 1) === 10 /* '\n' */) { - index += 1; - } - var nextIndex = index + 1; - // If there's a last line terminator, we push it to the indice. - // So use < instead of <=. - if (length < nextIndex) { - break; - } - this.indice.push(nextIndex); - regexp.lastIndex = nextIndex; - } - }; - - StructuredSource.prototype.locationToRange = function (loc) { - return [this.positionToIndex(loc.start), this.positionToIndex(loc.end)]; - }; - - StructuredSource.prototype.rangeToLocation = function (range) { - return new SourceLocation(this.indexToPosition(range[0]), this.indexToPosition(range[1])); - }; - - StructuredSource.prototype.positionToIndex = function (pos) { - // Line number starts with 1. - // Column number starts with 0. - var start = this.indice[pos.line - 1]; - return start + pos.column; - }; - - StructuredSource.prototype.indexToPosition = function (index) { - var startLine = upperBound(this.indice, index); - return new Position(startLine, index - this.indice[startLine - 1]); - }; - - _classProps(StructuredSource, null, { - line: { - get: function () { - return this.indice.length; - } - } - }); - - return StructuredSource; -})(); - -exports["default"] = StructuredSource; -//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInN0cnVjdHVyZWQtc291cmNlLmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7SUF3QlMsVUFBVSx1QkFBVixVQUFVO0lBRU4sUUFBUSxHQUNOLFNBREYsUUFBUSxDQUNMLElBQUksRUFBRSxNQUFNLEVBQUU7QUFDdEIsTUFBSSxDQUFDLElBQUksR0FBRyxJQUFJLENBQUM7QUFDakIsTUFBSSxDQUFDLE1BQU0sR0FBRyxNQUFNLENBQUM7Q0FDeEI7O1FBSlEsUUFBUSxHQUFSLFFBQVE7SUFPUixjQUFjLEdBQ1osU0FERixjQUFjLENBQ1gsS0FBSyxFQUFFLEdBQUcsRUFBRTtBQUNwQixNQUFJLENBQUMsS0FBSyxHQUFHLEtBQUssQ0FBQztBQUNuQixNQUFJLENBQUMsR0FBRyxHQUFHLEdBQUcsQ0FBQztDQUNsQjs7UUFKUSxjQUFjLEdBQWQsY0FBYztJQVdOLGdCQUFnQjtNQUFoQixnQkFBZ0I7Ozs7O0FBS3RCLFdBTE0sZ0JBQWdCLENBS3JCLE1BQU0sRUFBRTtBQUNoQixRQUFJLENBQUMsTUFBTSxHQUFHLENBQUUsQ0FBQyxDQUFFLENBQUM7QUFDcEIsUUFBSSxNQUFNLEdBQUcscUJBQXFCLENBQUM7QUFDbkMsUUFBSSxNQUFNLEdBQUcsTUFBTSxDQUFDLE1BQU0sQ0FBQztBQUMzQixVQUFNLENBQUMsU0FBUyxHQUFHLENBQUMsQ0FBQztBQUNyQixXQUFPLElBQUksRUFBRTtBQUNULFVBQUksTUFBTSxHQUFHLE1BQU0sQ0FBQyxJQUFJLENBQUMsTUFBTSxDQUFDLENBQUM7QUFDakMsVUFBSSxDQUFDLE1BQU0sRUFBRTtBQUNULGNBQU07T0FDVDtBQUNELFVBQUksS0FBSyxHQUFHLE1BQU0sQ0FBQyxLQUFLLENBQUM7QUFDekIsVUFBSSxNQUFNLENBQUMsVUFBVSxDQUFDLEtBQUssQ0FBQyxLQUFLLEVBQUksV0FBQSxJQUM3QixNQUFNLENBQUMsVUFBVSxDQUFDLEtBQUssR0FBRyxDQUFDLENBQUMsS0FBSyxFQUFJLFdBQUEsRUFBYztBQUN2RCxhQUFLLElBQUksQ0FBQyxDQUFDO09BQ2Q7QUFDRCxVQUFJLFNBQVMsR0FBRyxLQUFLLEdBQUcsQ0FBQyxDQUFDOzs7QUFHMUIsVUFBSSxNQUFNLEdBQUcsU0FBUyxFQUFFO0FBQ3BCLGNBQU07T0FDVDtBQUNELFVBQUksQ0FBQyxNQUFNLENBQUMsSUFBSSxDQUFDLFNBQVMsQ0FBQyxDQUFDO0FBQzVCLFlBQU0sQ0FBQyxTQUFTLEdBQUcsU0FBUyxDQUFDO0tBQ2hDO0dBQ0o7O0FBN0JnQixrQkFBZ0IsV0F1Q2pDLGVBQWUsR0FBQSxVQUFDLEdBQUcsRUFBRTtBQUNqQixXQUFPLENBQUUsSUFBSSxDQUFDLGVBQWUsQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLEVBQUUsSUFBSSxDQUFDLGVBQWUsQ0FBQyxHQUFHLENBQUMsR0FBRyxDQUFDLENBQUUsQ0FBQztHQUM3RTs7QUF6Q2dCLGtCQUFnQixXQStDakMsZUFBZSxHQUFBLFVBQUMsS0FBSyxFQUFFO0FBQ25CLFdBQU8sSUFBSSxjQUFjLENBQUMsSUFBSSxDQUFDLGVBQWUsQ0FBQyxLQ
UFLLENBQUMsQ0FBQyxDQUFDLENBQUMsRUFBRSxJQUFJLENBQUMsZUFBZSxDQUFDLEtBQUssQ0FBQyxDQUFDLENBQUMsQ0FBQyxDQUFDLENBQUM7R0FDN0Y7O0FBakRnQixrQkFBZ0IsV0F1RGpDLGVBQWUsR0FBQSxVQUFDLEdBQUcsRUFBRTs7O0FBR2pCLFFBQUksS0FBSyxHQUFHLElBQUksQ0FBQyxNQUFNLENBQUMsR0FBRyxDQUFDLElBQUksR0FBRyxDQUFDLENBQUMsQ0FBQztBQUN0QyxXQUFPLEtBQUssR0FBRyxHQUFHLENBQUMsTUFBTSxDQUFDO0dBQzdCOztBQTVEZ0Isa0JBQWdCLFdBa0VqQyxlQUFlLEdBQUEsVUFBQyxLQUFLLEVBQUU7QUFDbkIsUUFBSSxTQUFTLEdBQUcsVUFBVSxDQUFDLElBQUksQ0FBQyxNQUFNLEVBQUUsS0FBSyxDQUFDLENBQUM7QUFDL0MsV0FBTyxJQUFJLFFBQVEsQ0FBQyxTQUFTLEVBQUUsS0FBSyxHQUFHLElBQUksQ0FBQyxNQUFNLENBQUMsU0FBUyxHQUFHLENBQUMsQ0FBQyxDQUFDLENBQUM7R0FDdEU7O2NBckVnQixnQkFBZ0I7QUErQjdCLFFBQUk7V0FBQSxZQUFHO0FBQ1AsZUFBTyxJQUFJLENBQUMsTUFBTSxDQUFDLE1BQU0sQ0FBQztPQUM3Qjs7OztTQWpDZ0IsZ0JBQWdCOzs7cUJBQWhCLGdCQUFnQiIsImZpbGUiOiJzdHJ1Y3R1cmVkLXNvdXJjZS5qcyIsInNvdXJjZXNDb250ZW50IjpbIi8qXG4gIENvcHlyaWdodCAoQykgMjAxNCBZdXN1a2UgU3V6dWtpIDx1dGF0YW5lLnRlYUBnbWFpbC5jb20+XG5cbiAgUmVkaXN0cmlidXRpb24gYW5kIHVzZSBpbiBzb3VyY2UgYW5kIGJpbmFyeSBmb3Jtcywgd2l0aCBvciB3aXRob3V0XG4gIG1vZGlmaWNhdGlvbiwgYXJlIHBlcm1pdHRlZCBwcm92aWRlZCB0aGF0IHRoZSBmb2xsb3dpbmcgY29uZGl0aW9ucyBhcmUgbWV0OlxuXG4gICAgKiBSZWRpc3RyaWJ1dGlvbnMgb2Ygc291cmNlIGNvZGUgbXVzdCByZXRhaW4gdGhlIGFib3ZlIGNvcHlyaWdodFxuICAgICAgbm90aWNlLCB0aGlzIGxpc3Qgb2YgY29uZGl0aW9ucyBhbmQgdGhlIGZvbGxvd2luZyBkaXNjbGFpbWVyLlxuICAgICogUmVkaXN0cmlidXRpb25zIGluIGJpbmFyeSBmb3JtIG11c3QgcmVwcm9kdWNlIHRoZSBhYm92ZSBjb3B5cmlnaHRcbiAgICAgIG5vdGljZSwgdGhpcyBsaXN0IG9mIGNvbmRpdGlvbnMgYW5kIHRoZSBmb2xsb3dpbmcgZGlzY2xhaW1lciBpbiB0aGVcbiAgICAgIGRvY3VtZW50YXRpb24gYW5kL29yIG90aGVyIG1hdGVyaWFscyBwcm92aWRlZCB3aXRoIHRoZSBkaXN0cmlidXRpb24uXG5cbiAgVEhJUyBTT0ZUV0FSRSBJUyBQUk9WSURFRCBCWSBUSEUgQ09QWVJJR0hUIEhPTERFUlMgQU5EIENPTlRSSUJVVE9SUyBcIkFTIElTXCJcbiAgQU5EIEFOWSBFWFBSRVNTIE9SIElNUExJRUQgV0FSUkFOVElFUywgSU5DTFVESU5HLCBCVVQgTk9UIExJTUlURUQgVE8sIFRIRVxuICBJTVBMSUVEIFdBUlJBTlRJRVMgT0YgTUVSQ0hBTlRBQklMSVRZIEFORCBGSVRORVNTIEZPUiBBIFBBUlRJQ1VMQVIgUFVSUE9TRVxuICBBUkUgRElTQ0xBSU1FRC4gSU4gTk8gRVZFTlQgU0hBTEwgPENPUFlSSUdIVCBIT0xERVI+IEJFIExJQUJMRSBGT1IgQU5ZXG4gIERJUkVDVCwgSU5ESVJFQ1QsIElOQ0lERU5UQUwsIFNQRUNJQUwsIEVYRU1QTEFSWSwgT1IgQ09OU0VRVUVOVElBTCBEQU1BR0VTXG4gIChJTkNMVURJTkcsIEJVVCBOT1QgTElNSVRFRCBUTywgUFJPQ1VSRU1FTlQgT0YgU1VCU1RJVFVURSBHT09EUyBPUiBTRVJWSUNFUztcbiAgTE9TUyBPRiBVU0UsIERBVEEsIE9SIFBST0ZJVFM7IE9SIEJVU0lORVNTIElOVEVSUlVQVElPTikgSE9XRVZFUiBDQVVTRUQgQU5EXG4gIE9OIEFOWSBUSEVPUlkgT0YgTElBQklMSVRZLCBXSEVUSEVSIElOIENPTlRSQUNULCBTVFJJQ1QgTElBQklMSVRZLCBPUiBUT1JUXG4gIChJTkNMVURJTkcgTkVHTElHRU5DRSBPUiBPVEhFUldJU0UpIEFSSVNJTkcgSU4gQU5ZIFdBWSBPVVQgT0YgVEhFIFVTRSBPRlxuICBUSElTIFNPRlRXQVJFLCBFVkVOIElGIEFEVklTRUQgT0YgVEhFIFBPU1NJQklMSVRZIE9GIFNVQ0ggREFNQUdFLlxuKi9cblxuaW1wb3J0IHsgdXBwZXJCb3VuZCB9IGZyb20gJ2JvdW5kYXJ5JztcblxuZXhwb3J0IGNsYXNzIFBvc2l0aW9uIHtcbiAgICBjb25zdHJ1Y3RvcihsaW5lLCBjb2x1bW4pIHtcbiAgICAgICAgdGhpcy5saW5lID0gbGluZTtcbiAgICAgICAgdGhpcy5jb2x1bW4gPSBjb2x1bW47XG4gICAgfVxufVxuXG5leHBvcnQgY2xhc3MgU291cmNlTG9jYXRpb24ge1xuICAgIGNvbnN0cnVjdG9yKHN0YXJ0LCBlbmQpIHtcbiAgICAgICAgdGhpcy5zdGFydCA9IHN0YXJ0O1xuICAgICAgICB0aGlzLmVuZCA9IGVuZDtcbiAgICB9XG59XG5cbi8qKlxuICogU3RydWN0dXJlZFNvdXJjZVxuICogQGNsYXNzXG4gKi9cbmV4cG9ydCBkZWZhdWx0IGNsYXNzIFN0cnVjdHVyZWRTb3VyY2Uge1xuICAgIC8qKlxuICAgICAqIEBjb25zdHJ1Y3RzIFN0cnVjdHVyZWRTb3VyY2VcbiAgICAgKiBAcGFyYW0ge3N0cmluZ30gc291cmNlIC0gc291cmNlIGNvZGUgdGV4dC5cbiAgICAgKi9cbiAgICBjb25zdHJ1Y3Rvcihzb3VyY2UpIHtcbiAgICAgICAgdGhpcy5pbmRpY2UgPSBbIDAgXTtcbiAgICAgICAgbGV0IHJlZ2V4cCA9IC9bXFxyXFxuXFx1MjAyOFxcdTIwMjldL2c7XG4gICAgICAgIGxldCBsZW5ndGggPSBzb3VyY2UubGVuZ3RoO1xuICAgICAgICByZWdleHAubGFzdEluZGV4ID0gMDtcbiAgICAg
ICAgd2hpbGUgKHRydWUpIHtcbiAgICAgICAgICAgIGxldCByZXN1bHQgPSByZWdleHAuZXhlYyhzb3VyY2UpO1xuICAgICAgICAgICAgaWYgKCFyZXN1bHQpIHtcbiAgICAgICAgICAgICAgICBicmVhaztcbiAgICAgICAgICAgIH1cbiAgICAgICAgICAgIGxldCBpbmRleCA9IHJlc3VsdC5pbmRleDtcbiAgICAgICAgICAgIGlmIChzb3VyY2UuY2hhckNvZGVBdChpbmRleCkgPT09IDB4MEQgIC8qICdcXHInICovICYmXG4gICAgICAgICAgICAgICAgICAgIHNvdXJjZS5jaGFyQ29kZUF0KGluZGV4ICsgMSkgPT09IDB4MEEgIC8qICdcXG4nICovKSB7XG4gICAgICAgICAgICAgICAgaW5kZXggKz0gMTtcbiAgICAgICAgICAgIH1cbiAgICAgICAgICAgIGxldCBuZXh0SW5kZXggPSBpbmRleCArIDE7XG4gICAgICAgICAgICAvLyBJZiB0aGVyZSdzIGEgbGFzdCBsaW5lIHRlcm1pbmF0b3IsIHdlIHB1c2ggaXQgdG8gdGhlIGluZGljZS5cbiAgICAgICAgICAgIC8vIFNvIHVzZSA8IGluc3RlYWQgb2YgPD0uXG4gICAgICAgICAgICBpZiAobGVuZ3RoIDwgbmV4dEluZGV4KSB7XG4gICAgICAgICAgICAgICAgYnJlYWs7XG4gICAgICAgICAgICB9XG4gICAgICAgICAgICB0aGlzLmluZGljZS5wdXNoKG5leHRJbmRleCk7XG4gICAgICAgICAgICByZWdleHAubGFzdEluZGV4ID0gbmV4dEluZGV4O1xuICAgICAgICB9XG4gICAgfVxuXG4gICAgZ2V0IGxpbmUoKSB7XG4gICAgICAgIHJldHVybiB0aGlzLmluZGljZS5sZW5ndGg7XG4gICAgfVxuXG4gICAgLyoqXG4gICAgICogQHBhcmFtIHtTb3VyY2VMb2NhdGlvbn0gbG9jIC0gbG9jYXRpb24gaW5kaWNhdG9yLlxuICAgICAqIEByZXR1cm4ge1sgbnVtYmVyLCBudW1iZXIgXX0gcmFuZ2UuXG4gICAgICovXG4gICAgbG9jYXRpb25Ub1JhbmdlKGxvYykge1xuICAgICAgICByZXR1cm4gWyB0aGlzLnBvc2l0aW9uVG9JbmRleChsb2Muc3RhcnQpLCB0aGlzLnBvc2l0aW9uVG9JbmRleChsb2MuZW5kKSBdO1xuICAgIH1cblxuICAgIC8qKlxuICAgICAqIEBwYXJhbSB7WyBudW1iZXIsIG51bWJlciBdfSByYW5nZSAtIHBhaXIgb2YgaW5kaWNlLlxuICAgICAqIEByZXR1cm4ge1NvdXJjZUxvY2F0aW9ufSBsb2NhdGlvbi5cbiAgICAgKi9cbiAgICByYW5nZVRvTG9jYXRpb24ocmFuZ2UpIHtcbiAgICAgICAgcmV0dXJuIG5ldyBTb3VyY2VMb2NhdGlvbih0aGlzLmluZGV4VG9Qb3NpdGlvbihyYW5nZVswXSksIHRoaXMuaW5kZXhUb1Bvc2l0aW9uKHJhbmdlWzFdKSk7XG4gICAgfVxuXG4gICAgLyoqXG4gICAgICogQHBhcmFtIHtQb3NpdGlvbn0gcG9zIC0gcG9zaXRpb24gaW5kaWNhdG9yLlxuICAgICAqIEByZXR1cm4ge251bWJlcn0gaW5kZXguXG4gICAgICovXG4gICAgcG9zaXRpb25Ub0luZGV4KHBvcykge1xuICAgICAgICAvLyBMaW5lIG51bWJlciBzdGFydHMgd2l0aCAxLlxuICAgICAgICAvLyBDb2x1bW4gbnVtYmVyIHN0YXJ0cyB3aXRoIDAuXG4gICAgICAgIGxldCBzdGFydCA9IHRoaXMuaW5kaWNlW3Bvcy5saW5lIC0gMV07XG4gICAgICAgIHJldHVybiBzdGFydCArIHBvcy5jb2x1bW47XG4gICAgfVxuXG4gICAgLyoqXG4gICAgICogQHBhcmFtIHtudW1iZXJ9IGluZGV4IC0gaW5kZXggdG8gdGhlIHNvdXJjZSBjb2RlLlxuICAgICAqIEByZXR1cm4ge1Bvc2l0aW9ufSBwb3NpdGlvbi5cbiAgICAgKi9cbiAgICBpbmRleFRvUG9zaXRpb24oaW5kZXgpIHtcbiAgICAgICAgbGV0IHN0YXJ0TGluZSA9IHVwcGVyQm91bmQodGhpcy5pbmRpY2UsIGluZGV4KTtcbiAgICAgICAgcmV0dXJuIG5ldyBQb3NpdGlvbihzdGFydExpbmUsIGluZGV4IC0gdGhpcy5pbmRpY2Vbc3RhcnRMaW5lIC0gMV0pO1xuICAgIH1cbn07XG5cbi8qIHZpbTogc2V0IHN3PTQgdHM9NCBldCB0dz04MCA6ICovXG4iXSwic291cmNlUm9vdCI6Ii9zb3VyY2UvIn0= \ No newline at end of file diff --git a/node_modules/structured-source/package.json b/node_modules/structured-source/package.json deleted file mode 100644 index b5048b04..00000000 --- a/node_modules/structured-source/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "structured-source", - "version": "3.0.2", - "description": "Provides StructuredSource and functionality for converting range and loc vice versa.", - "author": "Yusuke SUZUKI", - "homepage": "https://github.com/Constellation/structured-source", - "repository": { - "type": "git", - "url": "https://github.com/Constellation/structured-source.git" - }, - "main": "lib/index.js", - "maintainers": [ - { - "name": "Yusuke SUZUKI", - "email": "utatane.tea@gmail.com", - "web": "http://github.com/Constellation" - } - ], - "files": [ - "lib" - ], - "scripts": { - "test": "gulp test" - }, - "dependencies": { - "boundary": "^1.0.1" - }, - "devDependencies": { - "gulp": "^3.8.10", - "gulp-6to5": "^1.0.2", - "gulp-bump": 
"^0.1.11", - "gulp-espower": "^0.10.0", - "gulp-filter": "^2.0.0", - "gulp-git": "^0.5.5", - "gulp-mocha": "^2.0.0", - "gulp-sourcemaps": "^1.2.8", - "gulp-tag-version": "^1.2.1", - "power-assert": "^0.10.0" - }, - "keywords": [ - "location", - "range", - "abstract", - "syntax", - "tree" - ], - "bugs": { - "url": "https://github.com/Constellation/structured-source/issues" - }, - "licenses": [ - { - "type": "BSD", - "url": "http://github.com/Constellation/structured-source/raw/master/LICENSE.BSD" - } - ] -} diff --git a/node_modules/trim-trailing-lines/index.js b/node_modules/trim-trailing-lines/index.js deleted file mode 100644 index eff85c6b..00000000 --- a/node_modules/trim-trailing-lines/index.js +++ /dev/null @@ -1,8 +0,0 @@ -'use strict' - -module.exports = trimTrailingLines - -// Remove final newline characters from `value`. -function trimTrailingLines(value) { - return String(value).replace(/\n+$/, '') -} diff --git a/node_modules/trim-trailing-lines/readme.md b/node_modules/trim-trailing-lines/readme.md deleted file mode 100644 index a9c1f441..00000000 --- a/node_modules/trim-trailing-lines/readme.md +++ /dev/null @@ -1,68 +0,0 @@ -# trim-trailing-lines - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -Remove final line feeds from a string. - -## Install - -[npm][]: - -```sh -npm install trim-trailing-lines -``` - -## Use - -```js -var trimTrailingLines = require('trim-trailing-lines') - -trimTrailingLines('foo\nbar') // => 'foo\nbar' -trimTrailingLines('foo\nbar\n') // => 'foo\nbar' -trimTrailingLines('foo\nbar\n\n') // => 'foo\nbar' -``` - -## API - -### `trimTrailingLines(value)` - -Remove final line feed characters from `value`. - -###### Parameters - -* `value` (`string`) — Value with trailing line feeds, coerced to string. - -###### Returns - -`string` — Value without trailing newlines. 
- -## License - -[MIT][license] © [Titus Wormer][author] - -<!-- Definitions --> - -[build-badge]: https://img.shields.io/travis/wooorm/trim-trailing-lines.svg - -[build]: https://travis-ci.org/wooorm/trim-trailing-lines - -[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/trim-trailing-lines.svg - -[coverage]: https://codecov.io/github/wooorm/trim-trailing-lines - -[downloads-badge]: https://img.shields.io/npm/dm/trim-trailing-lines.svg - -[downloads]: https://www.npmjs.com/package/trim-trailing-lines - -[size-badge]: https://img.shields.io/bundlephobia/minzip/trim-trailing-lines.svg - -[size]: https://bundlephobia.com/result?p=trim-trailing-lines - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com diff --git a/node_modules/trim/.npmignore b/node_modules/trim/.npmignore deleted file mode 100644 index f1250e58..00000000 --- a/node_modules/trim/.npmignore +++ /dev/null @@ -1,4 +0,0 @@ -support -test -examples -*.sock diff --git a/node_modules/trim/History.md b/node_modules/trim/History.md deleted file mode 100644 index c8aa68fa..00000000 --- a/node_modules/trim/History.md +++ /dev/null @@ -1,5 +0,0 @@ - -0.0.1 / 2010-01-03 -================== - - * Initial release diff --git a/node_modules/trim/Makefile b/node_modules/trim/Makefile deleted file mode 100644 index 4e9c8d36..00000000 --- a/node_modules/trim/Makefile +++ /dev/null @@ -1,7 +0,0 @@ - -test: - @./node_modules/.bin/mocha \ - --require should \ - --reporter spec - -.PHONY: test \ No newline at end of file diff --git a/node_modules/trim/Readme.md b/node_modules/trim/Readme.md deleted file mode 100644 index 3460f523..00000000 --- a/node_modules/trim/Readme.md +++ /dev/null @@ -1,69 +0,0 @@ - -# trim - - Trims string whitespace. - -## Installation - -``` -$ npm install trim -$ component install component/trim -``` - -## API - - - [trim(str)](#trimstr) - - [.left(str)](#leftstr) - - [.right(str)](#rightstr) -<a name="" /> - -<a name="trimstr" /> -### trim(str) -should trim leading / trailing whitespace. - -```js -trim(' foo bar ').should.equal('foo bar'); -trim('\n\n\nfoo bar\n\r\n\n').should.equal('foo bar'); -``` - -<a name="leftstr" /> -### .left(str) -should trim leading whitespace. - -```js -trim.left(' foo bar ').should.equal('foo bar '); -``` - -<a name="rightstr" /> -### .right(str) -should trim trailing whitespace. - -```js -trim.right(' foo bar ').should.equal(' foo bar'); -``` - - -## License - -(The MIT License) - -Copyright (c) 2012 TJ Holowaychuk &lt;tj@vision-media.ca&gt; - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/trim/component.json b/node_modules/trim/component.json deleted file mode 100644 index 560b2589..00000000 --- a/node_modules/trim/component.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "trim", - "version": "0.0.1", - "description": "Trim string whitespace", - "keywords": ["string", "trim"], - "scripts": ["index.js"] -} \ No newline at end of file diff --git a/node_modules/trim/index.js b/node_modules/trim/index.js deleted file mode 100644 index 640c24cf..00000000 --- a/node_modules/trim/index.js +++ /dev/null @@ -1,14 +0,0 @@ - -exports = module.exports = trim; - -function trim(str){ - return str.replace(/^\s*|\s*$/g, ''); -} - -exports.left = function(str){ - return str.replace(/^\s*/, ''); -}; - -exports.right = function(str){ - return str.replace(/\s*$/, ''); -}; diff --git a/node_modules/trim/package.json b/node_modules/trim/package.json deleted file mode 100644 index 64ee5c69..00000000 --- a/node_modules/trim/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "trim", - "version": "0.0.1", - "description": "Trim string whitespace", - "keywords": ["string", "trim"], - "author": "TJ Holowaychuk <tj@vision-media.ca>", - "dependencies": {}, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "main": "index", - "component": { - "scripts": { - "trim/index.js": "index.js" - } - } -} diff --git a/node_modules/unherit/index.js b/node_modules/unherit/index.js deleted file mode 100644 index 32ead777..00000000 --- a/node_modules/unherit/index.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict' - -var xtend = require('xtend') -var inherits = require('inherits') - -module.exports = unherit - -// Create a custom constructor which can be modified without affecting the -// original class. -function unherit(Super) { - var result - var key - var value - - inherits(Of, Super) - inherits(From, Of) - - // Clone values. - result = Of.prototype - - for (key in result) { - value = result[key] - - if (value && typeof value === 'object') { - result[key] = 'concat' in value ? value.concat() : xtend(value) - } - } - - return Of - - // Constructor accepting a single argument, which itself is an `arguments` - // object. - function From(parameters) { - return Super.apply(this, parameters) - } - - // Constructor accepting variadic arguments. - function Of() { - if (!(this instanceof Of)) { - return new From(arguments) - } - - return Super.apply(this, arguments) - } -} diff --git a/node_modules/unherit/license b/node_modules/unherit/license deleted file mode 100644 index f3722d94..00000000 --- a/node_modules/unherit/license +++ /dev/null @@ -1,21 +0,0 @@ -(The MIT License) - -Copyright (c) 2015 Titus Wormer <tituswormer@gmail.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/unherit/package.json b/node_modules/unherit/package.json deleted file mode 100644 index 445a5006..00000000 --- a/node_modules/unherit/package.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "name": "unherit", - "version": "1.1.3", - "description": "Clone a constructor without affecting the super-class", - "license": "MIT", - "keywords": [ - "clone", - "super", - "class", - "constructor" - ], - "repository": "wooorm/unherit", - "bugs": "https://github.com/wooorm/unherit/issues", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - }, - "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", - "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" - ], - "files": [ - "index.js" - ], - "dependencies": { - "inherits": "^2.0.0", - "xtend": "^4.0.0" - }, - "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^15.0.0", - "prettier": "^1.0.0", - "remark-cli": "^7.0.0", - "remark-preset-wooorm": "^6.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "xo": "^0.25.0" - }, - "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s unherit -o unherit.js", - "build-mangle": "browserify . -s unherit -p tinyify -o unherit.min.js", - "build": "npm run build-bundle && npm run build-mangle", - "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" - }, - "prettier": { - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "bracketSpacing": false, - "semi": false, - "trailingComma": "none" - }, - "xo": { - "prettier": true, - "esnext": false, - "rules": { - "unicorn/prefer-reflect-apply": "off", - "guard-for-in": "off" - }, - "ignores": [ - "unherit.js" - ] - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" - ] - } -} diff --git a/node_modules/unherit/readme.md b/node_modules/unherit/readme.md deleted file mode 100644 index bf679597..00000000 --- a/node_modules/unherit/readme.md +++ /dev/null @@ -1,79 +0,0 @@ -# unherit - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -Create a custom constructor which can be modified without affecting the original -class. - -## Install - -[npm][]: - -```sh -npm install unherit -``` - -## Use - -```js -var EventEmitter = require('events').EventEmitter -var unherit = require('unherit') - -// Create a private class which acts just like `EventEmitter`. -var Emitter = unherit(EventEmitter) - -Emitter.prototype.defaultMaxListeners = 0 -// Now, all instances of `Emitter` have no maximum listeners, without affecting -// other `EventEmitter`s. - -new Emitter().defaultMaxListeners === 0 // => true -new EventEmitter().defaultMaxListeners === undefined // => true -new Emitter() instanceof EventEmitter // => true -``` - -## API - -### `unherit(Super)` - -Create a custom constructor which can be modified without affecting the original -class. 
- -###### Parameters - -* `Super` (`Function`) — Super-class - -###### Returns - -`Function` — Constructor acting like `Super`, which can be modified without -affecting the original class. - -## License - -[MIT][license] © [Titus Wormer][author] - -<!-- Definitions --> - -[build-badge]: https://img.shields.io/travis/wooorm/unherit.svg - -[build]: https://travis-ci.org/wooorm/unherit - -[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/unherit.svg - -[coverage]: https://codecov.io/github/wooorm/unherit - -[downloads-badge]: https://img.shields.io/npm/dm/unherit.svg - -[downloads]: https://www.npmjs.com/package/unherit - -[size-badge]: https://img.shields.io/bundlephobia/minzip/unherit.svg - -[size]: https://bundlephobia.com/result?p=unherit - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com diff --git a/node_modules/unified/changelog.md b/node_modules/unified/changelog.md new file mode 100644 index 00000000..b50c832d --- /dev/null +++ b/node_modules/unified/changelog.md @@ -0,0 +1,5 @@ +# Changelog + +See [GitHub Releases][releases] for the changelog. + +[releases]: https://github.com/unifiedjs/unified/releases diff --git a/node_modules/unified/index.js b/node_modules/unified/index.js index b8f9011d..6cb1e261 100644 --- a/node_modules/unified/index.js +++ b/node_modules/unified/index.js @@ -1,20 +1,19 @@ 'use strict' -/* Dependencies. */ -var extend = require('extend') var bail = require('bail') -var vfile = require('vfile') -var trough = require('trough') -var string = require('x-is-string') +var buffer = require('is-buffer') +var extend = require('extend') var plain = require('is-plain-obj') +var trough = require('trough') +var vfile = require('vfile') -/* Expose a frozen processor. */ +// Expose a frozen processor. module.exports = unified().freeze() var slice = [].slice var own = {}.hasOwnProperty -/* Process pipeline. */ +// Process pipeline. var pipeline = trough() .use(pipelineParse) .use(pipelineRun) @@ -27,9 +26,9 @@ function pipelineParse(p, ctx) { function pipelineRun(p, ctx, next) { p.run(ctx.tree, ctx.file, done) - function done(err, tree, file) { - if (err) { - next(err) + function done(error, tree, file) { + if (error) { + next(error) } else { ctx.tree = tree ctx.file = file @@ -39,28 +38,36 @@ function pipelineRun(p, ctx, next) { } function pipelineStringify(p, ctx) { - ctx.file.contents = p.stringify(ctx.tree, ctx.file) + var result = p.stringify(ctx.tree, ctx.file) + + if (result === undefined || result === null) { + // Empty. + } else if (typeof result === 'string' || buffer(result)) { + ctx.file.contents = result + } else { + ctx.file.result = result + } } -/* Function to create the first processor. */ +// Function to create the first processor. function unified() { var attachers = [] var transformers = trough() var namespace = {} - var frozen = false var freezeIndex = -1 + var frozen - /* Data management. */ + // Data management. processor.data = data - /* Lock. */ + // Lock. processor.freeze = freeze - /* Plug-ins. */ + // Plugins. processor.attachers = attachers processor.use = use - /* API. */ + // API. processor.parse = parse processor.stringify = stringify processor.run = run @@ -68,17 +75,15 @@ function unified() { processor.process = process processor.processSync = processSync - /* Expose. */ + // Expose. return processor - /* Create a new processor based on the processor - * in the current scope. */ + // Create a new processor based on the processor in the current scope. 
function processor() { var destination = unified() - var length = attachers.length var index = -1 - while (++index < length) { + while (++index < attachers.length) { destination.use.apply(null, attachers[index]) } @@ -87,19 +92,15 @@ function unified() { return destination } - /* Freeze: used to signal a processor that has finished - * configuration. - * - * For example, take unified itself. It’s frozen. - * Plug-ins should not be added to it. Rather, it should - * be extended, by invoking it, before modifying it. - * - * In essence, always invoke this when exporting a - * processor. */ + // Freeze: used to signal a processor that has finished configuration. + // + // For example, take unified itself: it’s frozen. + // Plugins should not be added to it. + // Rather, it should be extended, by invoking it, before modifying it. + // + // In essence, always invoke this when exporting a processor. function freeze() { var values - var plugin - var options var transformer if (frozen) { @@ -108,19 +109,16 @@ function unified() { while (++freezeIndex < attachers.length) { values = attachers[freezeIndex] - plugin = values[0] - options = values[1] - transformer = null - if (options === false) { + if (values[1] === false) { continue } - if (options === true) { + if (values[1] === true) { values[1] = undefined } - transformer = plugin.apply(processor, values.slice(1)) + transformer = values[0].apply(processor, values.slice(1)) if (typeof transformer === 'function') { transformers.use(transformer) @@ -133,48 +131,46 @@ function unified() { return processor } - /* Data management. - * Getter / setter for processor-specific informtion. */ + // Data management. + // Getter / setter for processor-specific informtion. function data(key, value) { - if (string(key)) { - /* Set `key`. */ + if (typeof key === 'string') { + // Set `key`. if (arguments.length === 2) { assertUnfrozen('data', frozen) - namespace[key] = value - return processor } - /* Get `key`. */ + // Get `key`. return (own.call(namespace, key) && namespace[key]) || null } - /* Set space. */ + // Set space. if (key) { assertUnfrozen('data', frozen) namespace = key return processor } - /* Get space. */ + // Get space. return namespace } - /* Plug-in management. - * - * Pass it: - * * an attacher and options, - * * a preset, - * * a list of presets, attachers, and arguments (list - * of attachers and options). */ + // Plugin management. + // + // Pass it: + // * an attacher and options, + // * a preset, + // * a list of presets, attachers, and arguments (list of attachers and + // options). function use(value) { var settings assertUnfrozen('use', frozen) if (value === null || value === undefined) { - /* Empty */ + // Empty. } else if (typeof value === 'function') { addPlugin.apply(null, arguments) } else if (typeof value === 'object') { @@ -216,16 +212,12 @@ function unified() { } function addList(plugins) { - var length - var index + var index = -1 if (plugins === null || plugins === undefined) { - /* Empty */ + // Empty. 
} else if (typeof plugins === 'object' && 'length' in plugins) { - length = plugins.length - index = -1 - - while (++index < length) { + while (++index < plugins.length) { add(plugins[index]) } } else { @@ -238,7 +230,7 @@ function unified() { if (entry) { if (plain(entry[1]) && plain(value)) { - value = extend(entry[1], value) + value = extend(true, entry[1], value) } entry[1] = value @@ -249,22 +241,17 @@ function unified() { } function find(plugin) { - var length = attachers.length var index = -1 - var entry - - while (++index < length) { - entry = attachers[index] - if (entry[0] === plugin) { - return entry + while (++index < attachers.length) { + if (attachers[index][0] === plugin) { + return attachers[index] } } } - /* Parse a file (in string or VFile representation) - * into a Unist node using the `Parser` on the - * processor. */ + // Parse a file (in string or vfile representation) into a unist node using + // the `Parser` on the processor. function parse(doc) { var file = vfile(doc) var Parser @@ -273,15 +260,15 @@ function unified() { Parser = processor.Parser assertParser('parse', Parser) - if (newable(Parser)) { + if (newable(Parser, 'parse')) { return new Parser(String(file), file).parse() } return Parser(String(file), file) // eslint-disable-line new-cap } - /* Run transforms on a Unist node representation of a file - * (in string or VFile representation), async. */ + // Run transforms on a unist node representation of a file (in string or + // vfile representation), async. function run(node, file, cb) { assertNode(node) freeze() @@ -300,10 +287,10 @@ function unified() { function executor(resolve, reject) { transformers.run(node, vfile(file), done) - function done(err, tree, file) { + function done(error, tree, file) { tree = tree || node - if (err) { - reject(err) + if (error) { + reject(error) } else if (resolve) { resolve(tree) } else { @@ -313,11 +300,11 @@ function unified() { } } - /* Run transforms on a Unist node representation of a file - * (in string or VFile representation), sync. */ + // Run transforms on a unist node representation of a file (in string or + // vfile representation), sync. function runSync(node, file) { - var complete = false var result + var complete run(node, file, done) @@ -325,16 +312,15 @@ function unified() { return result - function done(err, tree) { + function done(error, tree) { complete = true - bail(err) result = tree + bail(error) } } - /* Stringify a Unist node representation of a file - * (in string or VFile representation) into a string - * using the `Compiler` on the processor. */ + // Stringify a unist node representation of a file (in string or vfile + // representation) into a string using the `Compiler` on the processor. function stringify(node, doc) { var file = vfile(doc) var Compiler @@ -344,18 +330,17 @@ function unified() { assertCompiler('stringify', Compiler) assertNode(node) - if (newable(Compiler)) { + if (newable(Compiler, 'compile')) { return new Compiler(node, file).compile() } return Compiler(node, file) // eslint-disable-line new-cap } - /* Parse a file (in string or VFile representation) - * into a Unist node using the `Parser` on the processor, - * then run transforms on that node, and compile the - * resulting node using the `Compiler` on the processor, - * and store that result on the VFile. 
*/ + // Parse a file (in string or vfile representation) into a unist node using + // the `Parser` on the processor, then run transforms on that node, and + // compile the resulting node using the `Compiler` on the processor, and + // store that result on the vfile. function process(doc, cb) { freeze() assertParser('process', processor.Parser) @@ -372,9 +357,9 @@ function unified() { pipeline.run(processor, {file: file}, done) - function done(err) { - if (err) { - reject(err) + function done(error) { + if (error) { + reject(error) } else if (resolve) { resolve(file) } else { @@ -384,11 +369,10 @@ function unified() { } } - /* Process the given document (in string or VFile - * representation), sync. */ + // Process the given document (in string or vfile representation), sync. function processSync(doc) { - var complete = false var file + var complete freeze() assertParser('processSync', processor.Parser) @@ -401,62 +385,68 @@ function unified() { return file - function done(err) { + function done(error) { complete = true - bail(err) + bail(error) } } } -/* Check if `func` is a constructor. */ -function newable(value) { - return typeof value === 'function' && keys(value.prototype) +// Check if `value` is a constructor. +function newable(value, name) { + return ( + typeof value === 'function' && + value.prototype && + // A function with keys in its prototype is probably a constructor. + // Classes’ prototype methods are not enumerable, so we check if some value + // exists in the prototype. + (keys(value.prototype) || name in value.prototype) + ) } -/* Check if `value` is an object with keys. */ +// Check if `value` is an object with keys. function keys(value) { var key for (key in value) { return true } + return false } -/* Assert a parser is available. */ +// Assert a parser is available. function assertParser(name, Parser) { if (typeof Parser !== 'function') { throw new Error('Cannot `' + name + '` without `Parser`') } } -/* Assert a compiler is available. */ +// Assert a compiler is available. function assertCompiler(name, Compiler) { if (typeof Compiler !== 'function') { throw new Error('Cannot `' + name + '` without `Compiler`') } } -/* Assert the processor is not frozen. */ +// Assert the processor is not frozen. function assertUnfrozen(name, frozen) { if (frozen) { throw new Error( - [ - 'Cannot invoke `' + name + '` on a frozen processor.\nCreate a new ', - 'processor first, by invoking it: use `processor()` instead of ', - '`processor`.' - ].join('') + 'Cannot invoke `' + + name + + '` on a frozen processor.\nCreate a new processor first, by invoking it: use `processor()` instead of `processor`.' ) } } -/* Assert `node` is a Unist node. */ +// Assert `node` is a unist node. function assertNode(node) { - if (!node || !string(node.type)) { + if (!node || typeof node.type !== 'string') { throw new Error('Expected node, got `' + node + '`') } } -/* Assert that `complete` is `true`. */ +// Assert that `complete` is `true`. 
function assertDone(name, asyncName, complete) { if (!complete) { throw new Error( diff --git a/node_modules/unified/package.json b/node_modules/unified/package.json index 21777216..1987fa74 100644 --- a/node_modules/unified/package.json +++ b/node_modules/unified/package.json @@ -1,60 +1,85 @@ { "name": "unified", - "version": "6.2.0", - "description": "Pluggable text processing interface", + "version": "9.2.1", + "description": "Interface for parsing, inspecting, transforming, and serializing content through syntax trees", "license": "MIT", "keywords": [ + "unified", "process", "parse", "transform", "compile", "stringify", + "serialize", + "ast", + "cst", + "syntax", + "tree", + "content", "rehype", "retext", "remark" ], + "homepage": "https://unifiedjs.com", "repository": "unifiedjs/unified", "bugs": "https://github.com/unifiedjs/unified/issues", - "author": "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)" + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", + "Junyoung Choi <fluke8259@gmail.com>", + "Hernan Rajchert <hrajchert@gmail.com>", + "Christian Murphy <christian.murphy.42@gmail.com>", + "Vse Mozhet Byt <vsemozhetbyt@gmail.com>", + "Richard Littauer <richard.littauer@gmail.com>" ], + "types": "types/ts3.4/index.d.ts", + "typesVersions": { + ">=4.0": { + "types/ts3.4/*": [ + "types/ts4.0/*" + ] + } + }, "files": [ + "types/ts3.4/index.d.ts", + "types/ts4.0/index.d.ts", "index.js", "lib" ], "dependencies": { "bail": "^1.0.0", "extend": "^3.0.0", - "is-plain-obj": "^1.1.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^2.0.0", "trough": "^1.0.0", - "vfile": "^2.0.0", - "x-is-string": "^0.1.0" + "vfile": "^4.0.0" }, "devDependencies": { - "browserify": "^16.0.0", - "esmangle": "^1.0.0", - "nyc": "^11.0.0", - "prettier": "^1.12.1", - "remark-cli": "^5.0.0", - "remark-preset-wooorm": "^4.0.0", - "tape": "^4.4.0", - "xo": "^0.20.0" + "browserify": "^17.0.0", + "c8": "^7.0.0", + "dtslint": "^4.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "xo": "^0.38.0" }, "scripts": { - "format": "remark . -qfo && prettier --write '**/*.js' && xo --fix", - "build-bundle": "browserify index.js -s unified > unified.js", - "build-mangle": "esmangle unified.js > unified.min.js", + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", + "build-bundle": "browserify index.js -s unified -o unified.js", + "build-mangle": "browserify index.js -s unified -p tinyify -o unified.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test", - "test": "npm run format && npm run build && npm run test-coverage" - }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 + "test-coverage": "c8 --check-coverage --lines 100 --functions 100 --branches 100 --reporter lcov tape test", + "test-types": "npm run test-types-3.4 && npm run test-types-4.0", + "test-types-3.4": "dtslint types/ts3.4", + "test-types-4.0": "dtslint types/ts4.0", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "prettier": { "tabWidth": 2, @@ -69,12 +94,14 @@ "esnext": false, "rules": { "guard-for-in": "off", - "no-var": "off", - "object-shorthand": "off", - "prefer-arrow-callback": "off", + "no-unreachable-loop": "off", + "unicorn/prefer-number-properties": "off", + "unicorn/prefer-optional-catch-binding": "off", + "unicorn/prefer-reflect-apply": "off", "unicorn/prefer-type-error": "off" }, "ignores": [ + "types", "unified.js" ] }, diff --git a/node_modules/unified/readme.md b/node_modules/unified/readme.md index e979e0b3..742de126 100644 --- a/node_modules/unified/readme.md +++ b/node_modules/unified/readme.md @@ -1,26 +1,96 @@ -# ![unified][logo] - -[![Build Status][travis-badge]][travis] -[![Coverage Status][codecov-badge]][codecov] +# [![unified][logo]][site] + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] [![Chat][chat-badge]][chat] -**unified** is an interface for processing text using syntax trees. It’s what -powers [**remark**][remark], [**retext**][retext], and [**rehype**][rehype], -but it also allows for processing between multiple syntaxes. - -The website for **unified**, [`unifiedjs.github.io`][site], provides a less -technical and more practical introduction to unified. Make sure to visit it -and try its introductory [Guides][]. - -## Installation +**unified** is an interface for processing text using syntax trees. +It’s what powers [**remark**][remark] (Markdown), [**retext**][retext] (natural +language), and [**rehype**][rehype] (HTML), and allows for processing between +formats. + +## Intro + +**unified** enables new exciting projects like [Gatsby][] to pull in Markdown, +[MDX][] to embed [JSX][], and [Prettier][] to format it. +It’s used in about 500k projects on GitHub and has about 25m downloads each +month on npm: you’re probably using it. +Some notable users are [Node.js][], [Vercel][], [Netlify][], [GitHub][], +[Mozilla][], [WordPress][], [Adobe][], [Facebook][], [Google][], and many more. + +* To read about what we are up to, follow us [Twitter][] +* For a less technical and more practical introduction to unified, visit + [`unifiedjs.com`][site] and peruse its [Learn][] section +* Browse [awesome unified][awesome] to find out more about the ecosystem +* Questions? + Get help on [Discussions][chat]! +* Check out [Contribute][] below to find out how to help out, or become a + backer or sponsor on [OpenCollective][collective] + +## Sponsors + +Support this effort and give back by sponsoring on [OpenCollective][collective]! 
+ +<!--lint ignore no-html--> + +<table> +<tr valign="middle"> +<td width="20%" align="center" colspan="2"> + <a href="https://www.gatsbyjs.org">Gatsby</a> 🥇<br><br> + <a href="https://www.gatsbyjs.org"><img src="https://avatars1.githubusercontent.com/u/12551863?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://vercel.com">Vercel</a> 🥇<br><br> + <a href="https://vercel.com"><img src="https://avatars1.githubusercontent.com/u/14985020?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://www.netlify.com">Netlify</a><br><br> + <!--OC has a sharper image--> + <a href="https://www.netlify.com"><img src="https://images.opencollective.com/netlify/4087de2/logo/256.png" width="128"></a> +</td> +<td width="10%" align="center"> + <a href="https://www.holloway.com">Holloway</a><br><br> + <a href="https://www.holloway.com"><img src="https://avatars1.githubusercontent.com/u/35904294?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://themeisle.com">ThemeIsle</a><br><br> + <a href="https://themeisle.com"><img src="https://avatars1.githubusercontent.com/u/58979018?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://boosthub.io">Boost Hub</a><br><br> + <a href="https://boosthub.io"><img src="https://images.opencollective.com/boosthub/6318083/logo/128.png" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://expo.io">Expo</a><br><br> + <a href="https://expo.io"><img src="https://avatars1.githubusercontent.com/u/12504344?s=128&v=4" width="64"></a> +</td> +</tr> +<tr valign="middle"> +<td width="100%" align="center" colspan="10"> + <br> + <a href="https://opencollective.com/unified"><strong>You?</strong></a> + <br><br> +</td> +</tr> +</table> + +## Install [npm][]: -```bash +```sh npm install unified ``` -## Usage +This package comes with types. +If you’re using TypeScript, make sure to also install +[`@types/unist`][ts-unist]. 
+ +## Use ```js var unified = require('unified') @@ -34,10 +104,10 @@ var report = require('vfile-reporter') unified() .use(markdown) .use(remark2rehype) - .use(doc) + .use(doc, {title: '👋🌍'}) .use(format) .use(html) - .process('# Hello world!', function(err, file) { + .process('# Hello world!', function (err, file) { console.error(report(err || file)) console.log(String(file)) }) @@ -45,12 +115,16 @@ unified() Yields: -```html +```txt no issues found -<!DOCTYPE html> +``` + +```html +<!doctype html> <html lang="en"> <head> <meta charset="utf-8"> + <title>👋🌍</title> <meta name="viewport" content="width=device-width, initial-scale=1"> </head> <body> @@ -59,40 +133,41 @@ no issues found </html> ``` -## Table of Contents +## Contents * [Description](#description) * [API](#api) - * [processor()](#processor) - * [processor.use(plugin\[, options\])](#processoruseplugin-options) - * [processor.parse(file|value)](#processorparsefilevalue) - * [processor.stringify(node\[, file\])](#processorstringifynode-file) - * [processor.run(node\[, file\]\[, done\])](#processorrunnode-file-done) - * [processor.runSync(node\[, file\])](#processorrunsyncnode-file) - * [processor.process(file|value\[, done\])](#processorprocessfilevalue-done) - * [processor.processSync(file|value)](#processorprocesssyncfilevalue) - * [processor.data(key\[, value\])](#processordatakey-value) - * [processor.freeze()](#processorfreeze) -* [Plugin](#plugin) - * [function attacher(\[options\])](#function-attacheroptions) - * [function transformer(node, file\[, next\])](#function-transformernode-file-next) -* [Preset](#preset) + * [`processor()`](#processor) + * [`processor.use(plugin[, options])`](#processoruseplugin-options) + * [`processor.parse(file)`](#processorparsefile) + * [`processor.stringify(node[, file])`](#processorstringifynode-file) + * [`processor.run(node[, file][, done])`](#processorrunnode-file-done) + * [`processor.runSync(node[, file])`](#processorrunsyncnode-file) + * [`processor.process(file[, done])`](#processorprocessfile-done) + * [`processor.processSync(file|value)`](#processorprocesssyncfilevalue) + * [`processor.data([key[, value]])`](#processordatakey-value) + * [`processor.freeze()`](#processorfreeze) +* [`Plugin`](#plugin) + * [`function attacher([options])`](#function-attacheroptions) + * [`function transformer(node, file[, next])`](#function-transformernode-file-next) +* [`Preset`](#preset) * [Contribute](#contribute) * [Acknowledgments](#acknowledgments) * [License](#license) ## Description -**unified** is an interface for processing text using syntax trees. Syntax -trees are a representation understandable to programs. Those programs, called -[**plugin**][plugin]s, take these trees and modify them, amongst other things. -To get to the syntax tree from input text there’s a [**parser**][parser]. To -get from that back to text there’s a [**compiler**][compiler]. This is the -[**process**][process] of a **processor**. +**unified** is an interface for processing text using syntax trees. +Syntax trees are a representation of text understandable to programs. +Those programs, called [*plugin*][plugin]s, take these trees and inspect and +modify them. +To get to the syntax tree from text, there is a [*parser*][parser]. +To get from that back to text, there is a [*compiler*][compiler]. +This is the [*process*][process] of a *processor*. ```ascii -| ....................... process() ......................... | -| ......... parse() ..... | run() | ..... stringify() ....... | +| ........................ 
process ........................... | +| .......... parse ... | ... run ... | ... stringify ..........| +--------+ +----------+ Input ->- | Parser | ->- Syntax Tree ->- | Compiler | ->- Output @@ -106,73 +181,81 @@ Input ->- | Parser | ->- Syntax Tree ->- | Compiler | ->- Output ###### Processors -Every processor implements another processor. To create a new processor invoke -another processor. This creates a processor that is configured to function the -same as its ancestor. But when the descendant processor is configured in the -future it does not affect the ancestral processor. +Every **processor** implements another processor. +To create a processor, call another processor. +The new processor is configured to work the same as its ancestor. +But when the descendant processor is configured in the future it does not affect +the ancestral processor. + +When processors are exposed from a module (for example, `unified` itself) they +should not be configured directly, as that would change their behavior for all +module users. +Those processors are [*frozen*][freeze] and they should be called to create a +new processor before they are used. -When processors are exposed from a module (for example, unified itself) they -should not be configured directly, as that would change their behaviour for all -module users. Those processors are [**frozen**][freeze] and they should be -invoked to create a new processor before they are used. +###### Syntax trees -###### Node +The **syntax trees** used in **unified** are [**unist**][unist] nodes. +A [**node**][node] is a plain JavaScript objects with a `type` field. +The semantics of nodes and format of syntax trees is defined by other projects. -The syntax trees used in **unified** are [**Unist**][unist] nodes: plain -JavaScript objects with a `type` property. The semantics of those `type`s are -defined by other projects. +There are several [*utilities*][unist-utilities] for working with nodes. -There are several [utilities][unist-utilities] for working with these nodes. +* [**hast**][hast] — HTML +* [**mdast**][mdast] — Markdown +* [**nlcst**][nlcst] — Natural language +* [**xast**][xast] — XML -###### List of Processors +###### List of processors -The following projects process different syntax trees. They parse text to -their respective syntax tree and they compile their syntax trees back to text. -These processors can be used as-is, or their parsers and compilers can be mixed -and matched with **unified** and other plugins to process between different -syntaxes. +The following projects process different [*syntax tree*][syntax-tree] formats. +They parse text to a syntax tree and compile that back to text. +These processors can be used as is, or their parser and compiler can be mixed +and matched with **unified** and plugins to process between different syntaxes. 
-* [**rehype**][rehype] ([**HAST**][hast]) — HTML -* [**remark**][remark] ([**MDAST**][mdast]) — Markdown -* [**retext**][retext] ([**NLCST**][nlcst]) — Natural language +* [**rehype**][rehype] ([*hast*][hast]) — HTML +* [**remark**][remark] ([*mdast*][mdast]) — Markdown +* [**retext**][retext] ([*nlcst*][nlcst]) — Natural language -###### List of Plugins +###### List of plugins -The below plugins work with **unified**, unrelated to what flavour the syntax -tree is in: +The below [**plugins**][plugin] work with **unified**, on all [*syntax +tree*][syntax-tree] formats: * [`unified-diff`](https://github.com/unifiedjs/unified-diff) — Ignore messages for unchanged lines in Travis +* [`unified-message-control`](https://github.com/unifiedjs/unified-message-control) + — Enable, disable, and ignore messages See [**remark**][remark-plugins], [**rehype**][rehype-plugins], and -[**retext**][retext-plugins] for lists of their plugins. +[**retext**][retext-plugins] for their lists of plugins. ###### File -When processing documents metadata is often gathered about that document. -[**VFile**][vfile] is a virtual file format which stores data and handles -metadata and messages for **unified** and its plugins. +When processing a document, **metadata** is often gathered about that document. +[**vfile**][vfile] is a virtual file format that stores data, metadata, and +messages about files for **unified** and its plugins. -There are several [utilities][vfile-utilities] for working with these files. +There are several [*utilities*][vfile-utilities] for working with these files. ###### Configuration -To configure a processor invoke its [`use`][use] method, supply it a -[**plugin**][plugin], and optionally settings. +[*Processors*][processors] are configured with [*plugin*][plugin]s or +with the [`data`][data] method. ###### Integrations -**unified** can integrate with the file-system through -[`unified-engine`][engine]. On top of that, CLI apps can be created with -[`unified-args`][args], Gulp plugins with [`unified-engine-gulp`][gulp], and -Atom Linters with [`unified-engine-atom`][atom]. +**unified** can integrate with the file system with [`unified-engine`][engine]. +CLI apps can be created with [`unified-args`][args], Gulp plugins with +[`unified-engine-gulp`][gulp], and Atom Linters with +[`unified-engine-atom`][atom]. -A streaming interface is provided through [`unified-stream`][stream]. +[`unified-stream`][stream] provides a streaming interface. 
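For illustration, a minimal sketch of that streaming interface (assuming `unified-stream` and the remark/rehype plugins used elsewhere in this readme are installed):

```js
var stream = require('unified-stream')
var unified = require('unified')
var markdown = require('remark-parse')
var remark2rehype = require('remark-rehype')
var html = require('rehype-stringify')

// Wrap a configured processor in a transform stream and pipe stdin to stdout.
var processor = unified().use(markdown).use(remark2rehype).use(html)

process.stdin.pipe(stream(processor)).pipe(process.stdout)
```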
###### Programming interface -The API gives access to processing metadata (such as lint messages) and -supports multiple passed through files: +The API provided by **unified** allows multiple files to be processed and gives +access to *metadata* (such as lint messages): ```js var unified = require('unified') @@ -188,15 +271,10 @@ var report = require('vfile-reporter') unified() .use(markdown) .use(styleGuide) - .use( - remark2retext, - unified() - .use(english) - .use(equality) - ) + .use(remark2retext, unified().use(english).use(equality)) .use(remark2rehype) .use(html) - .process('*Emphasis* and _importance_, you guys!', function(err, file) { + .process('*Emphasis* and _stress_, you guys!', function (err, file) { console.error(report(err || file)) console.log(String(file)) }) @@ -205,29 +283,32 @@ unified() Yields: ```txt - 1:16-1:28 warning Emphasis should use `*` as a marker emphasis-marker remark-lint - 1:34-1:38 warning `guys` may be insensitive, use `people`, `persons`, `folks` instead gals-men retext-equality + 1:16-1:24 warning Emphasis should use `*` as a marker emphasis-marker remark-lint + 1:30-1:34 warning `guys` may be insensitive, use `people`, `persons`, `folks` instead gals-men retext-equality ⚠ 2 warnings -<p><em>Emphasis</em> and <em>importance</em>, you guys!</p> +``` + +```html +<p><em>Emphasis</em> and <em>stress</em>, you guys!</p> ``` ###### Processing between syntaxes -The processors can be combined in two modes. +[*Processors*][processors] can be combined in two modes. -**Bridge** mode transforms the syntax tree from one flavour (the origin) to -another (the destination). Then, transformations are applied on that tree. -Finally, the origin processor continues transforming the original syntax tree. +**Bridge** mode transforms the [*syntax tree*][syntax-tree] from one format +(*origin*) to another (*destination*). +Another processor runs on the destination tree. +Finally, the original processor continues transforming the origin tree. -**Mutate** mode also transforms the syntax tree from one flavour to another. -But then the origin processor continues transforming the destination syntax -tree. +**Mutate** mode also transforms the syntax tree from one format to another. +But the original processor continues transforming the destination tree. In the previous example (“Programming interface”), `remark-retext` is used in -bridge mode: the origin syntax tree is kept after retext is done; whereas -`remark-rehype` is used in mutate mode: it sets a new syntax tree and discards -the original. +*bridge* mode: the origin syntax tree is kept after [**retext**][retext] is +done; whereas `remark-rehype` is used in *mutate* mode: it sets a new syntax +tree and discards the origin tree. * [`remark-retext`][remark-retext] * [`remark-rehype`][remark-rehype] @@ -238,13 +319,14 @@ the original. ### `processor()` -Object describing how to process text. +[*Processor*][processors] describing how to *process* text. ###### Returns -`Function` — New [**unfrozen**][freeze] processor which is configured to -function the same as its ancestor. But when the descendant processor is -configured in the future it does not affect the ancestral processor. +`Function` — New [*unfrozen*][freeze] processor that is configured to work the +same as its ancestor. +When the descendant processor is configured in the future it does not affect the +ancestral processor. 
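For instance, a minimal sketch (using nothing beyond `unified` itself) of how a descendant created by calling a processor is configured independently of its ancestor:

```js
var unified = require('unified')

// Calling a processor creates a descendant that starts out configured the
// same way; configuring the descendant afterwards leaves the ancestor alone.
var ancestor = unified().data('alpha', 'bravo')
var descendant = ancestor()

descendant.data('charlie', 'delta')

console.log(ancestor.data('charlie')) // => undefined
console.log(descendant.data('alpha')) // => 'bravo'
```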
###### Example @@ -258,9 +340,7 @@ var concat = require('concat-stream') process.stdin.pipe(concat(onconcat)) function onconcat(buf) { - var doc = remark() - .processSync(buf) - .toString() + var doc = remark().processSync(buf).toString() process.stdout.write(doc) } @@ -268,8 +348,12 @@ function onconcat(buf) { ### `processor.use(plugin[, options])` -Configure the processor to use a [**plugin**][plugin] and optionally configure -that plugin with options. +[*Configure*][configuration] the processor to use a [*plugin*][plugin] and +optionally configure that plugin with options. + +If the processor is already using this plugin, the previous plugin configuration +is changed based on the options that are passed in. +The plugin is not added a second time. ###### Signatures @@ -279,7 +363,7 @@ that plugin with options. ###### Parameters -* `plugin` ([`Plugin`][plugin]) +* `plugin` ([`Attacher`][plugin]) * `options` (`*`, optional) — Configuration for `plugin` * `preset` (`Object`) — Object with an optional `plugins` (set to `list`), and/or an optional `settings` object @@ -288,182 +372,293 @@ that plugin with options. ###### Returns -`processor` — The processor on which `use` is invoked. +`processor` — The processor that `use` was called on. ###### Note -`use` cannot be called on [frozen][freeze] processors. Invoke the processor -first to create a new unfrozen processor. +`use` cannot be called on [*frozen*][freeze] processors. +Call the processor first to create a new unfrozen processor. ###### Example -There are many ways to pass plugins to `.use()`. The below example gives an -overview. +There are many ways to pass plugins to `.use()`. +The below example gives an overview. ```js var unified = require('unified') unified() // Plugin with options: - .use(plugin, {}) + .use(pluginA, {x: true, y: true}) + // Passing the same plugin again merges configuration (to `{x: true, y: false, z: true}`): + .use(pluginA, {y: false, z: true}) // Plugins: - .use([plugin, pluginB]) + .use([pluginB, pluginC]) // Two plugins, the second with options: - .use([plugin, [pluginB, {}]]) + .use([pluginD, [pluginE, {}]]) // Preset with plugins and settings: - .use({plugins: [plugin, [pluginB, {}]], settings: {position: false}}) + .use({plugins: [pluginF, [pluginG, {}]], settings: {position: false}}) // Settings only: .use({settings: {position: false}}) - -function plugin() {} -function pluginB() {} ``` -### `processor.parse(file|value)` +### `processor.parse(file)` -Parse text to a syntax tree. +Parse text to a [*syntax tree*][syntax-tree]. ###### Parameters -* `file` ([`VFile`][file]) - — Or anything which can be given to `vfile()` +* `file` ([`VFile`][vfile]) — [*File*][file], any value accepted by `vfile()` ###### Returns -[`Node`][node] — Syntax tree representation of input. +[`Node`][node] — Parsed [*syntax tree*][syntax-tree] representing `file`. ###### Note -`parse` [freezes][freeze] the processor if not already frozen. +`parse` freezes the processor if not already [*frozen*][freeze]. + +`parse` performs the [*parse phase*][description], not the *run phase* or other +phases. + +###### Example + +The below example shows how `parse` can be used to create a syntax tree from a +file. 
+ +```js +var unified = require('unified') +var markdown = require('remark-parse') + +var tree = unified().use(markdown).parse('# Hello world!') + +console.log(tree) +``` + +Yields: + +```js +{ + type: 'root', + children: [ + {type: 'heading', depth: 1, children: [Array], position: [Position]} + ], + position: { + start: {line: 1, column: 1, offset: 0}, + end: {line: 1, column: 15, offset: 14} + } +} +``` #### `processor.Parser` -Function handling the parsing of text to a syntax tree. Used in the -[**parse**][parse] phase in the process and invoked with a `string` and -[`VFile`][file] representation of the document to parse. +A **parser** handles the parsing of text to a [*syntax tree*][syntax-tree]. +Used in the [*parse phase*][description] and called with a `string` and +[`VFile`][vfile] representation of the text to parse. -`Parser` can be a normal function in which case it must return a -[`Node`][node]: the syntax tree representation of the given file. +`Parser` can be a function, in which case it must return a [`Node`][node]: the +syntax tree representation of the given file. -`Parser` can also be a constructor function (a function with keys in its -`prototype`) in which case it’s invoked with `new`. Instances must have a -`parse` method which is invoked without arguments and must return a -[`Node`][node]. +`Parser` can also be a constructor function (a function with a `parse` field, or +other fields, in its `prototype`), in which case it’s constructed with `new`. +Instances must have a `parse` method that is called without arguments and must +return a [`Node`][node]. ### `processor.stringify(node[, file])` -Compile a syntax tree to text. +Compile a [*syntax tree*][syntax-tree]. ###### Parameters -* `node` ([`Node`][node]) -* `file` ([`VFile`][file], optional); - — Or anything which can be given to `vfile()` +* `node` ([`Node`][node]) — [*Syntax tree*][syntax-tree] to compile +* `file` ([`VFile`][vfile], optional) — [*File*][file], any value accepted by + `vfile()` ###### Returns -`string` — String representation of the syntax tree file. +`string` or `Buffer` (see notes) — Textual representation of the [*syntax +tree*][syntax-tree] ###### Note -`stringify` [freezes][freeze] the processor if not already frozen. +`stringify` freezes the processor if not already [*frozen*][freeze]. + +`stringify` performs the [*stringify phase*][description], not the *run phase* +or other phases. + +unified typically compiles by serializing: most [*compiler*][compiler]s return +`string` (or `Buffer`). +Some compilers, such as the one configured with [`rehype-react`][rehype-react], +return other values (in this case, a React tree). +If you’re using a compiler doesn’t serialize, expect different result values. +When using TypeScript, cast the type on your side. + +###### Example + +The below example shows how `stringify` can be used to serialize a syntax tree. + +```js +var unified = require('unified') +var html = require('rehype-stringify') +var h = require('hastscript') + +var tree = h('h1', 'Hello world!') + +var doc = unified().use(html).stringify(tree) + +console.log(doc) +``` + +Yields: + +```html +<h1>Hello world!</h1> +``` #### `processor.Compiler` -Function handling the compilation of syntax tree to a text. Used in the -[**stringify**][stringify] phase in the process and invoked with a -[`Node`][node] and [`VFile`][file] representation of the document to stringify. +A **compiler** handles the compiling of a [*syntax tree*][syntax-tree] to text. 
+Used in the [*stringify phase*][description] and called with a [`Node`][node] +and [`VFile`][file] representation of syntax tree to compile. -`Compiler` can be a normal function in which case it must return a `string`: -the text representation of the given syntax tree. +`Compiler` can be a function, in which case it should return a `string`: the +textual representation of the syntax tree. -`Compiler` can also be a constructor function (a function with keys in its -`prototype`) in which case it’s invoked with `new`. Instances must have a -`compile` method which is invoked without arguments and must return a `string`. +`Compiler` can also be a constructor function (a function with a `compile` +field, or other fields, in its `prototype`), in which case it’s constructed with +`new`. +Instances must have a `compile` method that is called without arguments and +should return a `string`. ### `processor.run(node[, file][, done])` -Transform a syntax tree by applying [**plugin**][plugin]s to it. +Run [*transformers*][transformer] on a [*syntax tree*][syntax-tree]. ###### Parameters -* `node` ([`Node`][node]) -* `file` ([`VFile`][file], optional) - — Or anything which can be given to `vfile()` -* `done` ([`Function`][run-done], optional) +* `node` ([`Node`][node]) — [*Syntax tree*][syntax-tree] to run on +* `file` ([`VFile`][vfile], optional) — [*File*][file], any value accepted by + `vfile()` +* `done` ([`Function`][run-done], optional) — Callback ###### Returns -[`Promise`][promise] if `done` is not given. Rejected with an error, or -resolved with the resulting syntax tree. +[`Promise`][promise] if `done` is not given. +The returned promise is rejected with a fatal error, or resolved with the +transformed [*syntax tree*][syntax-tree]. ###### Note -`run` [freezes][freeze] the processor if not already frozen. +`run` freezes the processor if not already [*frozen*][freeze]. + +`run` performs the [*run phase*][description], not other phases. -##### `function done(err[, node, file])` +#### `function done(err[, node, file])` -Invoked when transformation is complete. Either invoked with an error or a -syntax tree and a file. +Callback called when [*transformers*][transformer] are done. +Called with either an error or results. ###### Parameters -* `err` (`Error`) — Fatal error -* `node` ([`Node`][node]) -* `file` ([`VFile`][file]) +* `err` (`Error`, optional) — Fatal error +* `node` ([`Node`][node], optional) — Transformed [*syntax tree*][syntax-tree] +* `file` ([`VFile`][vfile], optional) — [*File*][file] + +###### Example + +The below example shows how `run` can be used to transform a syntax tree. + +```js +var unified = require('unified') +var references = require('remark-reference-links') +var u = require('unist-builder') + +var tree = u('root', [ + u('paragraph', [ + u('link', {href: 'https://example.com'}, [u('text', 'Example Domain')]) + ]) +]) + +unified() + .use(references) + .run(tree, function (err, tree) { + if (err) throw err + console.log(tree) + }) +``` + +Yields: + +```js +{ + type: 'root', + children: [ + {type: 'paragraph', children: [Array]}, + {type: 'definition', identifier: '1', title: undefined, url: undefined} + ] +} +``` ### `processor.runSync(node[, file])` -Transform a syntax tree by applying [**plugin**][plugin]s to it. +Run [*transformers*][transformer] on a [*syntax tree*][syntax-tree]. -If asynchronous [**plugin**][plugin]s are configured an error is thrown. +An error is thrown if asynchronous [*plugin*][plugin]s are configured. 
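For comparison with the `run` example above, a minimal sketch of the synchronous form (assuming, as there, that the configured transformer is synchronous):

```js
var unified = require('unified')
var references = require('remark-reference-links')
var u = require('unist-builder')

var tree = u('root', [
  u('paragraph', [
    u('link', {href: 'https://example.com'}, [u('text', 'Example Domain')])
  ])
])

// Same transform as in the `run` example, applied synchronously; the
// transformed tree is returned directly.
var transformed = unified().use(references).runSync(tree)

console.log(transformed)
```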
###### Parameters -* `node` ([`Node`][node]) -* `file` ([`VFile`][file], optional) - — Or anything which can be given to `vfile()` +* `node` ([`Node`][node]) — [*Syntax tree*][syntax-tree] to run on +* `file` ([`VFile`][vfile], optional) — [*File*][file], any value accepted by + `vfile()` ###### Returns -[`Node`][node] — The given syntax tree. +[`Node`][node] — Transformed [*syntax tree*][syntax-tree]. ###### Note -`runSync` [freezes][freeze] the processor if not already frozen. +`runSync` freezes the processor if not already [*frozen*][freeze]. + +`runSync` performs the [*run phase*][description], not other phases. -### `processor.process(file|value[, done])` +### `processor.process(file[, done])` -Process the given representation of a file as configured on the processor. The -process invokes `parse`, `run`, and `stringify` internally. +[*Process*][description] the given [*file*][file] as configured on the +processor. ###### Parameters -* `file` ([`VFile`][file]) -* `value` (`string`) — String representation of a file -* `done` ([`Function`][process-done], optional) +* `file` ([`VFile`][vfile]) — [*File*][file], any value accepted by `vfile()` +* `done` ([`Function`][process-done], optional) — Callback ###### Returns -[`Promise`][promise] if `done` is not given. Rejected with an error or -resolved with the resulting file. - -###### Note +[`Promise`][promise] if `done` is not given. +The returned promise is rejected with a fatal error, or resolved with the +processed [*file*][file]. -`process` [freezes][freeze] the processor if not already frozen. +The parsed, transformed, and compiled value is exposed on +[`file.contents`][vfile-contents] or `file.result` (see notes). -#### `function done(err, file)` +###### Note -Invoked when the process is complete. Invoked with a fatal error, if any, and -the [`VFile`][file]. +`process` freezes the processor if not already [*frozen*][freeze]. -###### Parameters +`process` performs the [*parse*, *run*, and *stringify* phases][description]. -* `err` (`Error`, optional) — Fatal error -* `file` ([`VFile`][file]) +unified typically compiles by serializing: most [*compiler*][compiler]s return +`string` (or `Buffer`). +Some compilers, such as the one configured with [`rehype-react`][rehype-react], +return other values (in this case, a React tree). +If you’re using a compiler that serializes, the result is available at +`file.contents`. +Otherwise, the result is available at `file.result`. ###### Example +The below example shows how `process` can be used to process a file, whether +transformers are asynchronous or not, with promises. + ```js var unified = require('unified') var markdown = require('remark-parse') @@ -475,15 +670,15 @@ var html = require('rehype-stringify') unified() .use(markdown) .use(remark2rehype) - .use(doc) + .use(doc, {title: '👋🌍'}) .use(format) .use(html) .process('# Hello world!') .then( - function(file) { + function (file) { console.log(String(file)) }, - function(err) { + function (err) { console.error(String(err)) } ) @@ -492,10 +687,11 @@ unified() Yields: ```html -<!DOCTYPE html> +<!doctype html> <html lang="en"> <head> <meta charset="utf-8"> + <title>👋🌍</title> <meta name="viewport" content="width=device-width, initial-scale=1"> </head> <body> @@ -504,28 +700,86 @@ Yields: </html> ``` +#### `function done(err, file)` + +Callback called when the [*process*][description] is done. +Called with a fatal error, if any, and a [*file*][file]. 
+ +###### Parameters + +* `err` (`Error`, optional) — Fatal error +* `file` ([`VFile`][vfile]) — Processed [*file*][file] + +###### Example + +The below example shows how `process` can be used to process a file, whether +transformers are asynchronous or not, with a callback. + +```js +var unified = require('unified') +var parse = require('remark-parse') +var stringify = require('remark-stringify') +var github = require('remark-github') +var report = require('vfile-reporter') + +unified() + .use(parse) + .use(github) + .use(stringify) + .process('@wooorm', function (err, file) { + console.error(report(err || file)) + console.log(String(file)) + }) +``` + +Yields: + +```txt +no issues found +``` + +```markdown +[**@wooorm**](https://github.com/wooorm) +``` + ### `processor.processSync(file|value)` -Process the given representation of a file as configured on the processor. The -process invokes `parse`, `run`, and `stringify` internally. +[*Process*][description] the given [*file*][file] as configured on the +processor. -If asynchronous [**plugin**][plugin]s are configured an error is thrown. +An error is thrown if asynchronous [*plugin*][plugin]s are configured. ###### Parameters -* `file` ([`VFile`][file]) -* `value` (`string`) — String representation of a file +* `file` ([`VFile`][vfile]) — [*File*][file], any value accepted by `vfile()` ###### Returns -[`VFile`][file] — Virtual file with modified [`contents`][vfile-contents]. +([`VFile`][vfile]) — Processed [*file*][file] + +The parsed, transformed, and compiled value is exposed on +[`file.contents`][vfile-contents] or `file.result` (see notes). ###### Note -`processSync` [freezes][freeze] the processor if not already frozen. +`processSync` freezes the processor if not already [*frozen*][freeze]. + +`processSync` performs the [*parse*, *run*, and *stringify* +phases][description]. + +unified typically compiles by serializing: most [*compiler*][compiler]s return +`string` (or `Buffer`). +Some compilers, such as the one configured with [`rehype-react`][rehype-react], +return other values (in this case, a React tree). +If you’re using a compiler that serializes, the result is available at +`file.contents`. +Otherwise, the result is available at `file.result`. ###### Example +The below example shows how `processSync` can be used to process a file, if all +transformers are synchronous. + ```js var unified = require('unified') var markdown = require('remark-parse') @@ -537,7 +791,7 @@ var html = require('rehype-stringify') var processor = unified() .use(markdown) .use(remark2rehype) - .use(doc) + .use(doc, {title: '👋🌍'}) .use(format) .use(html) @@ -547,10 +801,11 @@ console.log(processor.processSync('# Hello world!').toString()) Yields: ```html -<!DOCTYPE html> +<!doctype html> <html lang="en"> <head> <meta charset="utf-8"> + <title>👋🌍</title> <meta name="viewport" content="width=device-width, initial-scale=1"> </head> <body> @@ -559,26 +814,40 @@ Yields: </html> ``` -### `processor.data(key[, value])` +### `processor.data([key[, value]])` -Get or set information in an in-memory key-value store accessible to all phases -of the process. An example is a list of HTML elements which are self-closing, -which is needed when parsing, transforming, and compiling HTML. +[*Configure*][configuration] the processor with information available to all +[*plugin*][plugin]s. +Information is stored in an in-memory key-value store. 
+ +Typically, options can be given to a specific plugin, but sometimes it makes +sense to have information shared with several plugins. +For example, a list of HTML elements that are self-closing, which is needed +during all [*phases*][description] of the *process*. + +###### Signatures + +* `processor = processor.data(key, value)` +* `processor = processor.data(values)` +* `value = processor.data(key)` +* `info = processor.data()` ###### Parameters -* `key` (`string`) — Identifier -* `value` (`*`, optional) — Value to set. Omit if getting `key` +* `key` (`string`, optional) — Identifier +* `value` (`*`, optional) — Value to set +* `values` (`Object`, optional) — Values to set ###### Returns -* `processor` — If setting, the processor on which `data` is invoked -* `*` — If getting, the value at `key` +* `processor` — If setting, the processor that `data` is called on +* `value` (`*`) — If getting, the value at `key` +* `info` (`Object`) — Without arguments, the key-value store ###### Note -Setting information with `data` cannot occur on [frozen][freeze] processors. -Invoke the processor first to create a new unfrozen processor. +Setting information cannot occur on [*frozen*][freeze] processors. +Call the processor first to create a new unfrozen processor. ###### Example @@ -587,49 +856,45 @@ The following example show how to get and set information: ```js var unified = require('unified') -console.log( - unified() - .data('alpha', 'bravo') - .data('alpha') -) -``` +var processor = unified().data('alpha', 'bravo') -Yields: +processor.data('alpha') // => 'bravo' -```txt -bravo +processor.data() // {alpha: 'bravo'} + +processor.data({charlie: 'delta'}) + +processor.data() // {charlie: 'delta'} ``` ### `processor.freeze()` -Freeze a processor. Frozen processors are meant to be extended and not to be -configured or processed directly. +**Freeze** a processor. +*Frozen* processors are meant to be extended and not to be configured directly. -Once a processor is frozen it cannot be unfrozen. New processors functioning -just like it can be created by invoking the processor. +Once a processor is frozen it cannot be *unfrozen*. +New processors working the same way can be created by calling the processor. -It’s possible to freeze processors explicitly, by calling `.freeze()`, but -[`.parse()`][parse], [`.run()`][run], [`.stringify()`][stringify], and -[`.process()`][process] call `.freeze()` to freeze a processor too. +It’s possible to freeze processors explicitly by calling `.freeze()`. +Processors freeze implicitly when [`.parse()`][parse], [`.run()`][run], +[`.runSync()`][run-sync], [`.stringify()`][stringify], [`.process()`][process], +or [`.processSync()`][process-sync] are called. ###### Returns -`Processor` — The processor on which `freeze` is invoked. +`processor` — The processor that `freeze` was called on. ###### Example -The following example, `index.js`, shows how [**rehype**][rehype] prevents -extensions to itself: +The following example, `index.js`, shows how rehype prevents extensions to +itself: ```js var unified = require('unified') var parse = require('rehype-parse') var stringify = require('rehype-stringify') -module.exports = unified() - .use(parse) - .use(stringify) - .freeze() +module.exports = unified().use(parse).use(stringify).freeze() ``` The below example, `a.js`, shows how that processor can be used and configured. @@ -637,26 +902,27 @@ The below example, `a.js`, shows how that processor can be used and configured. 
```js var rehype = require('rehype') var format = require('rehype-format') -// ... +// … rehype() .use(format) - // ... + // … ``` -The below example, `b.js`, shows a similar looking example which operates on -the frozen [**rehype**][rehype] interface. If this behaviour was allowed it -would result in unexpected behaviour so an error is thrown. **This is -invalid**: +The below example, `b.js`, shows a similar looking example that operates on the +frozen rehype interface because it does not call `rehype`. +If this behavior was allowed it would result in unexpected behavior so an +error is thrown. +**This is invalid**: ```js var rehype = require('rehype') var format = require('rehype-format') -// ... +// … rehype .use(format) - // ... + // … ``` Yields: @@ -675,15 +941,15 @@ Create a new processor first, by invoking it: use `processor()` instead of `proc ## `Plugin` -**unified** plugins change the way the applied-on processor works in the -following ways: +**Plugins** [*configure*][configuration] the processors they are applied on in +the following ways: -* They modify the [**processor**][processor]: such as changing the parser, - the compiler, or linking it to other processors -* They transform [**syntax tree**][node] representation of files -* They modify metadata of files +* They change the processor: such as the [*parser*][parser], the + [*compiler*][compiler], or configuring [*data*][data] +* They specify how to handle [*syntax trees*][syntax-tree] and [*files*][file] -Plugins are a concept. They materialise as [`attacher`][attacher]s. +Plugins are a concept. +They materialize as [`attacher`][attacher]s. ###### Example @@ -709,6 +975,12 @@ function move(options) { } ``` +`index.md`: + +```markdown +# Hello, world! +``` + `index.js`: ```js @@ -725,7 +997,7 @@ unified() .use(remark2rehype) .use(move, {extname: '.html'}) .use(stringify) - .process(vfile.readSync('index.md'), function(err, file) { + .process(vfile.readSync('index.md'), function (err, file) { console.error(report(err || file)) if (file) { vfile.writeSync(file) // Written to `index.html`. @@ -733,18 +1005,31 @@ unified() }) ``` +Yields: + +```txt +index.md: no issues found +``` + +`index.html`: + +```html +<h1>Hello, world!</h1> +``` + ### `function attacher([options])` -An attacher is the thing passed to [`use`][use]. It configures the processor -and in turn can receive options. +**Attachers** are materialized [*plugin*][plugin]s. +An attacher is a function that can receive options and +[*configures*][configuration] the processor. -Attachers can configure processors, such as by interacting with parsers and -compilers, linking them to other processors, or by specifying how the syntax -tree is handled. +Attachers change the processor, such as the [*parser*][parser], the +[*compiler*][compiler], configuring [*data*][data], or by specifying how the +[*syntax tree*][syntax-tree] or [*file*][file] are handled. ###### Context -The context object is set to the invoked on [`processor`][processor]. +The context object (`this`) is set to the processor the attacher is applied on. ###### Parameters @@ -756,68 +1041,85 @@ The context object is set to the invoked on [`processor`][processor]. ###### Note -Attachers are invoked when the processor is [frozen][freeze]: either when -`.freeze()` is called explicitly, or when [`.parse()`][parse], [`.run()`][run], -[`.stringify()`][stringify], or [`.process()`][process] is called for the first -time. +Attachers are called when the processor is [*frozen*][freeze], not when they are +applied. 
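As an extra sketch (the plugin name and behavior here are hypothetical), an attacher receives options, can read the processor it is applied on through `this`, and may return a transformer:

```js
var unified = require('unified')

// Hypothetical plugin: the attacher reads its options and the processor’s
// data, and returns a transformer that adds an informational message.
function stamp(options) {
  var name = (options || {}).name || this.data('stampName') || 'anonymous'

  return transformer

  function transformer(tree, file) {
    // The tree is left as is; a message is attached to the file instead.
    file.info('stamped by ' + name, tree)
  }
}

// The attacher itself runs only once the processor is frozen (for example
// when `.freeze()`, `.run()`, or `.process()` is called).
var processor = unified().data('stampName', 'demo').use(stamp)
```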
### `function transformer(node, file[, next])` -Transformers modify the syntax tree or metadata of a file. A transformer is a -function which is invoked each time a file is passed through the transform -phase. If an error occurs (either because it’s thrown, returned, rejected, or -passed to [`next`][next]), the process stops. +**Transformers** handle [*syntax tree*][syntax-tree]s and [*file*][file]s. +A transformer is a function that is called each time a syntax tree and file are +passed through the [*run phase*][description]. +If an error occurs (either because it’s thrown, returned, rejected, or passed to +[`next`][next]), the process stops. -The transformation process in **unified** is handled by [`trough`][trough], see -it’s documentation for the exact semantics of transformers. +The *run phase* is handled by [`trough`][trough], see its documentation for the +exact semantics of these functions. ###### Parameters -* `node` ([`Node`][node]) -* `file` ([`VFile`][file]) +* `node` ([`Node`][node]) — [*Syntax tree*][syntax-tree] to handle +* `file` ([`VFile`][vfile]) — [*File*][file] to handle * `next` ([`Function`][next], optional) ###### Returns -* `Error` — Can be returned to stop the process -* [`Node`][node] — Can be returned and results in further transformations - and `stringify`s to be performed on the new tree -* `Promise` — If a promise is returned, the function is asynchronous, and - **must** be resolved (optionally with a [`Node`][node]) or rejected - (optionally with an `Error`) +* `void` — If nothing is returned, the next transformer keeps using same tree. +* `Error` — Fatal error to stop the process +* `node` ([`Node`][node]) — New [*syntax tree*][syntax-tree]. + If returned, the next transformer is given this new tree +* `Promise` — Returned to perform an asynchronous operation. + The promise **must** be resolved (optionally with a [`Node`][node]) or + rejected (optionally with an `Error`) #### `function next(err[, tree[, file]])` -If the signature of a transformer includes `next` (third argument), the -function **may** finish asynchronous, and **must** invoke `next()`. +If the signature of a [*transformer*][transformer] includes `next` (the third +argument), the transformer **may** perform asynchronous operations, and **must** +call `next()`. ###### Parameters -* `err` (`Error`, optional) — Stop the process -* `node` ([`Node`][node], optional) — New syntax tree -* `file` ([`VFile`][file], optional) — New virtual file +* `err` (`Error`, optional) — Fatal error to stop the process +* `node` ([`Node`][node], optional) — New [*syntax tree*][syntax-tree]. + If given, the next transformer is given this new tree +* `file` ([`VFile`][vfile], optional) — New [*file*][file]. + If given, the next transformer is given this new file ## `Preset` -Presets provide a potentially sharable way to configure processors. They can -contain multiple plugins and optionally settings as well. +**Presets** are sharable [*configuration*][configuration]. +They can contain [*plugins*][plugin] and settings. ###### Example `preset.js`: ```js -exports.settings = {bullet: '*', fences: true} +exports.settings = {bullet: '*', emphasis: '*', fences: true} exports.plugins = [ require('remark-preset-lint-recommended'), + require('remark-preset-lint-consistent'), require('remark-comment-config'), - require('remark-preset-lint-markdown-style-guide'), [require('remark-toc'), {maxDepth: 3, tight: true}], - require('remark-github') + require('remark-license') ] ``` +`readme.md`: + +```markdown +# Hello, world! 
+ +_Emphasis_ and **importance**. + +## Table of contents + +## API + +## License +``` + `index.js`: ```js @@ -828,7 +1130,7 @@ var preset = require('./preset') remark() .use(preset) - .process(vfile.readSync('index.md'), function(err, file) { + .process(vfile.readSync('readme.md'), function (err, file) { console.error(report(err || file)) if (file) { @@ -837,32 +1139,57 @@ remark() }) ``` -## Contribute +Yields: + +```txt +readme.md: no issues found +``` -**unified** is built by people just like you! Check out -[`contributing.md`][contributing] for ways to get started. +`readme.md` now contains: -This project has a [Code of Conduct][coc]. By interacting with this repository, -organisation, or community you agree to abide by its terms. +```markdown +# Hello, world! -Want to chat with the community and contributors? Join us in [Gitter][chat]! +*Emphasis* and **importance**. -Have an idea for a cool new utility or tool? That’s great! If you want -feedback, help, or just to share it with the world you can do so by creating -an issue in the [`unifiedjs/ideas`][ideas] repository! +## Table of contents + +* [API](#api) +* [License](#license) + +## API + +## License + +[MIT](license) © [Titus Wormer](https://wooorm.com) +``` + +## Contribute + +See [`contributing.md`][contributing] in [`unifiedjs/.github`][health] for ways +to get started. +See [`support.md`][support] for ways to get help. +Ideas for new plugins and tools can be posted in [`unifiedjs/ideas`][ideas]. + +A curated list of awesome unified resources can be found in [**awesome +unified**][awesome]. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. ## Acknowledgments Preliminary work for unified was done [in 2014][preliminary] for -[**retext**][retext] and inspired by [`ware`][ware]. Further incubation -happened in [**remark**][remark]. The project was finally [externalised][] -in 2015 and [published][] as `unified`. The project was authored by -[**@wooorm**](https://github.com/wooorm). +[**retext**][retext] and inspired by [`ware`][ware]. +Further incubation happened in [**remark**][remark]. +The project was finally [externalised][] in 2015 and [published][] as `unified`. +The project was authored by [**@wooorm**](https://github.com/wooorm). -Although `unified` since moved it’s plugin architecture to [`trough`][trough], +Although `unified` since moved its plugin architecture to [`trough`][trough], thanks to [**@calvinfo**](https://github.com/calvinfo), [**@ianstormtaylor**](https://github.com/ianstormtaylor), and others for their -work on [`ware`][ware], which was a huge initial inspiration. +work on [`ware`][ware], as it was a huge initial inspiration. ## License @@ -870,29 +1197,57 @@ work on [`ware`][ware], which was a huge initial inspiration. 
<!-- Definitions --> -[logo]: https://cdn.rawgit.com/unifiedjs/unified/0cd3a41/logo.svg +[logo]: https://raw.githubusercontent.com/unifiedjs/unified/93862e5/logo.svg?sanitize=true + +[build-badge]: https://github.com/unifiedjs/unified/workflows/main/badge.svg + +[build]: https://github.com/unifiedjs/unified/actions + +[coverage-badge]: https://img.shields.io/codecov/c/github/unifiedjs/unified.svg + +[coverage]: https://codecov.io/github/unifiedjs/unified + +[downloads-badge]: https://img.shields.io/npm/dm/unified.svg + +[downloads]: https://www.npmjs.com/package/unified + +[size-badge]: https://img.shields.io/bundlephobia/minzip/unified.svg + +[size]: https://bundlephobia.com/result?p=unified + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg -[travis-badge]: https://img.shields.io/travis/unifiedjs/unified.svg +[chat]: https://github.com/unifiedjs/unified/discussions -[travis]: https://travis-ci.org/unifiedjs/unified +[health]: https://github.com/unifiedjs/.github -[codecov-badge]: https://img.shields.io/codecov/c/github/unifiedjs/unified.svg +[contributing]: https://github.com/unifiedjs/.github/blob/HEAD/contributing.md -[codecov]: https://codecov.io/github/unifiedjs/unified +[support]: https://github.com/unifiedjs/.github/blob/HEAD/support.md -[chat-badge]: https://img.shields.io/gitter/room/unifiedjs/Lobby.svg +[coc]: https://github.com/unifiedjs/.github/blob/HEAD/code-of-conduct.md -[chat]: https://gitter.im/unifiedjs/Lobby +[awesome]: https://github.com/unifiedjs/awesome-unified + +[license]: license + +[author]: https://wooorm.com [npm]: https://docs.npmjs.com/cli/install -[license]: LICENSE +[ts-unist]: https://www.npmjs.com/package/@types/unist -[author]: http://wooorm.com +[site]: https://unifiedjs.com -[site]: https://unifiedjs.github.io +[twitter]: https://twitter.com/unifiedjs -[guides]: https://unifiedjs.github.io/#guides +[learn]: https://unifiedjs.com/learn/ [rehype]: https://github.com/rehypejs/rehype @@ -906,6 +1261,8 @@ work on [`ware`][ware], which was a huge initial inspiration. [nlcst]: https://github.com/syntax-tree/nlcst +[xast]: https://github.com/syntax-tree/xast + [unist]: https://github.com/syntax-tree/unist [engine]: https://github.com/unifiedjs/unified-engine @@ -932,15 +1289,23 @@ work on [`ware`][ware], which was a huge initial inspiration. [vfile-utilities]: https://github.com/vfile/vfile#related-tools +[node]: https://github.com/syntax-tree/unist#node + +[description]: #description + +[syntax-tree]: #syntax-trees + +[configuration]: #configuration + [file]: #file -[node]: #node +[processors]: #processors -[processor]: #processor +[process]: #processorprocessfile-done -[process]: #processorprocessfilevalue-done +[process-sync]: #processorprocesssyncfilevalue -[parse]: #processorparsefilevalue +[parse]: #processorparsefile [parser]: #processorparser @@ -948,9 +1313,11 @@ work on [`ware`][ware], which was a huge initial inspiration. [run]: #processorrunnode-file-done +[run-sync]: #processorrunsyncnode-file + [compiler]: #processorcompiler -[use]: #processoruseplugin-options +[data]: #processordatakey-value [attacher]: #function-attacheroptions @@ -966,22 +1333,22 @@ work on [`ware`][ware], which was a huge initial inspiration. 
[process-done]: #function-doneerr-file +[contribute]: #contribute + +[rehype-react]: https://github.com/rhysd/rehype-react + [trough]: https://github.com/wooorm/trough#function-fninput-next [promise]: https://developer.mozilla.org/Web/JavaScript/Reference/Global_Objects/Promise -[remark-plugins]: https://github.com/remarkjs/remark/blob/master/doc/plugins.md#list-of-plugins +[remark-plugins]: https://github.com/remarkjs/remark/blob/HEAD/doc/plugins.md#list-of-plugins -[rehype-plugins]: https://github.com/rehypejs/rehype/blob/master/doc/plugins.md#list-of-plugins +[rehype-plugins]: https://github.com/rehypejs/rehype/blob/HEAD/doc/plugins.md#list-of-plugins -[retext-plugins]: https://github.com/retextjs/retext/blob/master/doc/plugins.md#list-of-plugins +[retext-plugins]: https://github.com/retextjs/retext/blob/HEAD/doc/plugins.md#list-of-plugins [stream]: https://github.com/unifiedjs/unified-stream -[contributing]: contributing.md - -[coc]: code-of-conduct.md - [ideas]: https://github.com/unifiedjs/ideas [preliminary]: https://github.com/retextjs/retext/commit/8fcb1f#diff-168726dbe96b3ce427e7fedce31bb0bc @@ -991,3 +1358,29 @@ work on [`ware`][ware], which was a huge initial inspiration. [published]: https://github.com/unifiedjs/unified/commit/2ba1cf [ware]: https://github.com/segmentio/ware + +[gatsby]: https://www.gatsbyjs.org + +[mdx]: https://mdxjs.com + +[jsx]: https://reactjs.org/docs/jsx-in-depth.html + +[prettier]: https://prettier.io + +[node.js]: https://nodejs.org + +[vercel]: https://vercel.com + +[netlify]: https://www.netlify.com + +[github]: https://github.com + +[mozilla]: https://www.mozilla.org + +[wordpress]: https://wordpress.com + +[adobe]: https://www.adobe.com + +[facebook]: https://www.facebook.com + +[google]: https://www.google.com diff --git a/node_modules/unified/types/ts3.4/index.d.ts b/node_modules/unified/types/ts3.4/index.d.ts new file mode 100644 index 00000000..bcac0860 --- /dev/null +++ b/node_modules/unified/types/ts3.4/index.d.ts @@ -0,0 +1,407 @@ +// TypeScript Version: 3.4 + +import {Node} from 'unist' +import {VFile, VFileCompatible} from 'vfile' + +declare namespace unified { + /** + * Processor allows plugins, parsers, and compilers to be chained together to transform content. + * + * @typeParam P Processor settings. Useful when packaging unified with a preset parser and compiler. + */ + interface Processor<P = Settings> extends FrozenProcessor<P> { + /** + * Configure the processor to use a plugin and optionally configure that plugin with options. 
+ * + * @param plugin unified plugin + * @param settings Configuration for plugin + * @typeParam S Plugin settings + * @returns The processor on which use is invoked + */ + use<S extends any[] = [Settings?]>( + plugin: Plugin<S, P>, + ...settings: S + ): Processor<P> + + /** + * Configure the processor with a preset to use + * + * @param preset `Object` with an plugins (set to list), and/or an optional settings object + */ + use<S extends any[] = [Settings?]>(preset: Preset<S, P>): Processor<P> + + /** + * Configure using a tuple of plugin and setting(s) + * + * @param pluginTuple pairs, plugin and settings in an array + * @typeParam S Plugin settings + */ + use<S extends any[] = [Settings?]>( + pluginTuple: PluginTuple<S, P> + ): Processor<P> + + /** + * A list of plugins and presets to be applied to processor + * + * @param list List of plugins, presets, and pairs + */ + use(list: PluggableList<P>): Processor<P> + + /** + * Configuration passed to a frozen processor + * + * @param processorSettings Settings passed to processor + */ + use(processorSettings: ProcessorSettings<P>): Processor<P> + } + + /** + * A frozen processor is just like a regular processor, except no additional plugins can be added. + * A frozen processor can be created by calling `.freeze()` on a processor. + * + * See `Processor`. + */ + interface FrozenProcessor<P = Settings> { + /** + * Clone current processor + * + * @returns New unfrozen processor which is configured to function the same as its ancestor. + * But when the descendant processor is configured in the future it does not affect the ancestral processor. + */ + (): Processor<P> + + /** + * Parse text to a syntax tree. + * + * @param file VFile or anything which can be given to vfile() + * @returns Syntax tree representation of input. + */ + parse(file: VFileCompatible): Node + + /** + * Function handling the parsing of text to a syntax tree. + * Used in the parse phase in the process and invoked with a `string` and `VFile` representation of the document to parse. + * + * `Parser` can be a normal function in which case it must return a `Node`: the syntax tree representation of the given file. + * + * `Parser` can also be a constructor function (a function with keys in its `prototype`) in which case it’s invoked with `new`. + * Instances must have a parse method which is invoked without arguments and must return a `Node`. + */ + Parser: ParserConstructor | ParserFunction + + /** + * Compile a syntax tree to text. + * + * @param node unist node + * @param file `VFile` or anything which can be given to `vfile()` + * @returns String representation of the syntax tree file + */ + stringify(node: Node, file?: VFileCompatible): string + + /** + * Function handling the compilation of syntax tree to a text. + * Used in the stringify phase in the process and invoked with a `Node` and `VFile` representation of the document to stringify. + * + * `Compiler` can be a normal function in which case it must return a `string`: the text representation of the given syntax tree. + * + * `Compiler` can also be a constructor function (a function with keys in its `prototype`) in which case it’s invoked with `new`. + * Instances must have a `compile` method which is invoked without arguments and must return a `string`. + */ + Compiler: CompilerConstructor | CompilerFunction + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @returns `Promise` if `done` is not given. 
Rejected with an error, or resolved with the resulting syntax tree. + */ + run(node: Node): Promise<Node> + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @param file `VFile` or anything which can be given to `vfile()` + * @returns `Promise` if `done` is not given. Rejected with an error, or resolved with the resulting syntax tree. + */ + run(node: Node, file: VFileCompatible): Promise<Node> + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @param done Invoked when transformation is complete. + */ + run(node: Node, done: RunCallback): void + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @param file `VFile` or anything which can be given to `vfile()` + * @param done Invoked when transformation is complete. + */ + run(node: Node, file: VFileCompatible, done: RunCallback): void + + /** + * Transform a syntax tree by applying plugins to it. + * + * If asynchronous plugins are configured an error is thrown. + * + * @param node Node to transform + * @param file `VFile` or anything which can be given to `vfile()` + * @returns The given syntax tree. + */ + runSync(node: Node, file?: VFileCompatible): Node + + /** + * Process the given representation of a file as configured on the processor. The process invokes `parse`, `run`, and `stringify` internally. + * @param file `VFile` or anything which can be given to `vfile()` + * @returns `Promise` if `done` is not given. + * Rejected with an error or resolved with the resulting file. + */ + process(file: VFileCompatible): Promise<VFile> + + /** + * Process the given representation of a file as configured on the processor. The process invokes `parse`, `run`, and `stringify` internally. + * @param file `VFile` or anything which can be given to `vfile()` + * @param done Invoked when the process is complete. Invoked with a fatal error, if any, and the VFile. + */ + process(file: VFileCompatible, done: ProcessCallback): void + + /** + * Process the given representation of a file as configured on the processor. The process invokes `parse`, `run`, and `stringify` internally. + * + * If asynchronous plugins are configured an error is thrown. + * + * @param file `VFile` or anything which can be given to `vfile()` + * @returns Virtual file with modified contents. + */ + processSync(file: VFileCompatible): VFile + + /** + * Get or set information in an in-memory key-value store accessible to all phases of the process. + * An example is a list of HTML elements which are self-closing, which is needed when parsing, transforming, and compiling HTML. + * + * @returns key-value store object + */ + data(): {[key: string]: unknown} + + /** + * @param key Identifier + * @returns If getting, the value at key + */ + data(key: string): unknown + + /** + * @param value Value to set. Omit if getting key + * @returns If setting, the processor on which data is invoked + */ + data(key: string, value: any): Processor<P> + + /** + * Freeze a processor. Frozen processors are meant to be extended and not to be configured or processed directly. + * + * Once a processor is frozen it cannot be unfrozen. New processors functioning just like it can be created by invoking the processor. + * + * It’s possible to freeze processors explicitly, by calling `.freeze()`, but `.parse()`, `.run()`, `.stringify()`, and `.process()` call `.freeze()` to freeze a processor too. + * + * @returns The processor on which freeze is invoked. 
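+ *
+ * @example
+ * // Editorial sketch only (not part of the published typings); `somePlugin` and
+ * // `otherPlugin` are hypothetical attachers. A frozen processor cannot be
+ * // configured further, but calling it yields a fresh unfrozen processor that can.
+ * const base = unified().use(somePlugin).freeze()
+ * const extended = base().use(otherPlugin)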
+ */ + freeze(): FrozenProcessor<P> + } + + /** + * A Plugin (Attacher) is the thing passed to `use`. + * It configures the processor and in turn can receive options. + * + * Attachers can configure processors, such as by interacting with parsers and compilers, linking them to other processors, or by specifying how the syntax tree is handled. + * + * @param settings Configuration + * @typeParam S Plugin settings + * @typeParam P Processor settings + * @returns Optional Transformer. + */ + type Plugin<S extends any[] = [Settings?], P = Settings> = Attacher<S, P> + + /** + * Configuration passed to a Plugin or Processor + */ + interface Settings { + [key: string]: unknown + } + + /** + * Presets provide a potentially sharable way to configure processors. + * They can contain multiple plugins and optionally settings as well. + * + * @typeParam P Processor settings + */ + interface Preset<S = Settings, P = Settings> { + plugins: PluggableList<P> + settings?: Settings + } + + /** + * Settings can be passed directly to the processor + * + * @typeParam P Settings applied to a processor. Useful when packaging unified with a preset parser and compiler. + */ + interface ProcessorSettings<P = Settings> { + settings: P + } + + /** + * A pairing of a plugin with its settings + * + * @typeParam S Plugin settings + * @typeParam P Processor settings + */ + type PluginTuple<S extends any[] = [Settings?], P = Settings> = [ + Plugin<S, P>, + /** + * NOTE: ideally this would be S instead of any[] + * As of TypeScript 3.5.2 generic tuples cannot be spread + * See: https://github.com/microsoft/TypeScript/issues/26113 + */ + ...any[] + ] + + /** + * A union of the different ways to add plugins to unified + * + * @typeParam S Plugin settings + * @typeParam P Processor settings + */ + type Pluggable<S extends any[] = [Settings?], P = Settings> = + | Plugin<S, P> + | Preset<S, P> + | PluginTuple<S, P> + + /** + * A list of plugins and presets + * + * @typeParam P Processor settings + */ + type PluggableList<P = Settings> = Array<Pluggable<[any?], P>> + + /** + * An attacher is the thing passed to `use`. + * It configures the processor and in turn can receive options. + * + * Attachers can configure processors, such as by interacting with parsers and compilers, linking them to other processors, or by specifying how the syntax tree is handled. + * + * @param settings Configuration + * @typeParam S Plugin settings + * @typeParam P Processor settings + * @returns Optional Transformer. + */ + type Attacher<S extends any[] = [Settings?], P = Settings> = ( + this: Processor<P>, + ...settings: S + ) => Transformer | void + + /** + * Transformers modify the syntax tree or metadata of a file. A transformer is a function which is invoked each time a file is passed through the transform phase. + * If an error occurs (either because it’s thrown, returned, rejected, or passed to `next`), the process stops. + * + * The transformation process in unified is handled by `trough`, see it’s documentation for the exact semantics of transformers. + * + * @param node Node or tree to be transformed + * @param file File associated with node or tree + * @param next If the signature of a transformer includes `next` (third argument), the function may finish asynchronous, and must invoke `next()`. + * @returns + * - `void` — If nothing is returned, the next transformer keeps using same tree. 
+ * - `Error` — Can be returned to stop the process + * - `Node` — Can be returned and results in further transformations and `stringify`s to be performed on the new tree + * - `Promise` — If a promise is returned, the function is asynchronous, and must be resolved (optionally with a `Node`) or rejected (optionally with an `Error`) + */ + type Transformer = ( + node: Node, + file: VFile, + next?: ( + error: Error | null, + tree: Node, + file: VFile + ) => Record<string, unknown> + ) => Error | Node | Promise<Node> | void | Promise<void> + + /** + * Transform file contents into an AST + */ + interface Parser { + /** + * Transform file contents into an AST + * + * @returns Parsed AST node/tree + */ + parse(): Node + } + + /** + * A constructor function (a function with keys in its `prototype`) or class that implements a + * `parse` method. + */ + type ParserConstructor = new (text: string, file: VFile) => Parser + + /** + * Transform file contents into an AST + * + * @param text Text to transform into AST node(s) + * @param file File associated with text + * @returns Parsed AST node/tree + */ + type ParserFunction = (text: string, file: VFile) => Node + + /** + * Transform an AST node/tree into text + */ + interface Compiler { + /** + * Transform an AST node/tree into text + * + * @returns Compiled text + */ + compile(): string + } + + /** + * A constructor function (a function with keys in its `prototype`) or class that implements a + * `compile` method. + */ + type CompilerConstructor = new (node: Node, file: VFile) => Compiler + + /** + * Transform an AST node/tree into text + * + * @param node Node/tree to be stringified + * @param file File associated with node + * @returns Compiled text + */ + type CompilerFunction = (node: Node, file: VFile) => string + + /** + * Access results from transforms + * + * @param error Error if any occurred + * @param node Transformed AST tree/node + * @param vfile File associated with node + */ + type RunCallback = (error: Error | null, node: Node, file: VFile) => void + + /** + * Access results from transforms + * + * @param error Error if any occurred + * @param vfile File with updated content + */ + type ProcessCallback = (error: Error | null, file: VFile) => void +} + +/** + * Unified processor allows plugins, parsers, and compilers to be chained together to transform content. + * + * @typeParam P Processor settings. Useful when packaging unified with a preset parser and compiler. + */ +declare function unified<P = unified.Settings>(): unified.Processor<P> +export = unified diff --git a/node_modules/unified/types/ts4.0/index.d.ts b/node_modules/unified/types/ts4.0/index.d.ts new file mode 100644 index 00000000..e0c2c564 --- /dev/null +++ b/node_modules/unified/types/ts4.0/index.d.ts @@ -0,0 +1,402 @@ +// TypeScript Version: 4.0 + +import {Node} from 'unist' +import {VFile, VFileCompatible} from 'vfile' + +declare namespace unified { + /** + * Processor allows plugins, parsers, and compilers to be chained together to transform content. + * + * @typeParam P Processor settings. Useful when packaging unified with a preset parser and compiler. + */ + interface Processor<P = Settings> extends FrozenProcessor<P> { + /** + * Configure the processor to use a plugin and optionally configure that plugin with options. 
+ * + * @param plugin unified plugin + * @param settings Configuration for plugin + * @typeParam S Plugin settings + * @returns The processor on which use is invoked + */ + use<S extends any[] = [Settings?]>( + plugin: Plugin<S, P>, + ...settings: S + ): Processor<P> + + /** + * Configure the processor with a preset to use + * + * @param preset `Object` with an plugins (set to list), and/or an optional settings object + */ + use<S extends any[] = [Settings?]>(preset: Preset<S, P>): Processor<P> + + /** + * Configure using a tuple of plugin and setting(s) + * + * @param pluginTuple pairs, plugin and settings in an array + * @typeParam S Plugin settings + */ + use<S extends any[] = [Settings?]>( + pluginTuple: PluginTuple<S, P> + ): Processor<P> + + /** + * A list of plugins and presets to be applied to processor + * + * @param list List of plugins, presets, and pairs + */ + use(list: PluggableList<P>): Processor<P> + + /** + * Configuration passed to a frozen processor + * + * @param processorSettings Settings passed to processor + */ + use(processorSettings: ProcessorSettings<P>): Processor<P> + } + + /** + * A frozen processor is just like a regular processor, except no additional plugins can be added. + * A frozen processor can be created by calling `.freeze()` on a processor. + * + * See `Processor`. + */ + interface FrozenProcessor<P = Settings> { + /** + * Clone current processor + * + * @returns New unfrozen processor which is configured to function the same as its ancestor. + * But when the descendant processor is configured in the future it does not affect the ancestral processor. + */ + (): Processor<P> + + /** + * Parse text to a syntax tree. + * + * @param file VFile or anything which can be given to vfile() + * @returns Syntax tree representation of input. + */ + parse(file: VFileCompatible): Node + + /** + * Function handling the parsing of text to a syntax tree. + * Used in the parse phase in the process and invoked with a `string` and `VFile` representation of the document to parse. + * + * `Parser` can be a normal function in which case it must return a `Node`: the syntax tree representation of the given file. + * + * `Parser` can also be a constructor function (a function with keys in its `prototype`) in which case it’s invoked with `new`. + * Instances must have a parse method which is invoked without arguments and must return a `Node`. + */ + Parser: ParserConstructor | ParserFunction + + /** + * Compile a syntax tree to text. + * + * @param node unist node + * @param file `VFile` or anything which can be given to `vfile()` + * @returns String representation of the syntax tree file + */ + stringify(node: Node, file?: VFileCompatible): string + + /** + * Function handling the compilation of syntax tree to a text. + * Used in the stringify phase in the process and invoked with a `Node` and `VFile` representation of the document to stringify. + * + * `Compiler` can be a normal function in which case it must return a `string`: the text representation of the given syntax tree. + * + * `Compiler` can also be a constructor function (a function with keys in its `prototype`) in which case it’s invoked with `new`. + * Instances must have a `compile` method which is invoked without arguments and must return a `string`. + */ + Compiler: CompilerConstructor | CompilerFunction + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @returns `Promise` if `done` is not given. 
Rejected with an error, or resolved with the resulting syntax tree. + */ + run(node: Node): Promise<Node> + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @param file `VFile` or anything which can be given to `vfile()` + * @returns `Promise` if `done` is not given. Rejected with an error, or resolved with the resulting syntax tree. + */ + run(node: Node, file: VFileCompatible): Promise<Node> + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @param done Invoked when transformation is complete. + */ + run(node: Node, done: RunCallback): void + + /** + * Transform a syntax tree by applying plugins to it. + * + * @param node Node to transform + * @param file `VFile` or anything which can be given to `vfile()` + * @param done Invoked when transformation is complete. + */ + run(node: Node, file: VFileCompatible, done: RunCallback): void + + /** + * Transform a syntax tree by applying plugins to it. + * + * If asynchronous plugins are configured an error is thrown. + * + * @param node Node to transform + * @param file `VFile` or anything which can be given to `vfile()` + * @returns The given syntax tree. + */ + runSync(node: Node, file?: VFileCompatible): Node + + /** + * Process the given representation of a file as configured on the processor. The process invokes `parse`, `run`, and `stringify` internally. + * @param file `VFile` or anything which can be given to `vfile()` + * @returns `Promise` if `done` is not given. + * Rejected with an error or resolved with the resulting file. + */ + process(file: VFileCompatible): Promise<VFile> + + /** + * Process the given representation of a file as configured on the processor. The process invokes `parse`, `run`, and `stringify` internally. + * @param file `VFile` or anything which can be given to `vfile()` + * @param done Invoked when the process is complete. Invoked with a fatal error, if any, and the VFile. + */ + process(file: VFileCompatible, done: ProcessCallback): void + + /** + * Process the given representation of a file as configured on the processor. The process invokes `parse`, `run`, and `stringify` internally. + * + * If asynchronous plugins are configured an error is thrown. + * + * @param file `VFile` or anything which can be given to `vfile()` + * @returns Virtual file with modified contents. + */ + processSync(file: VFileCompatible): VFile + + /** + * Get or set information in an in-memory key-value store accessible to all phases of the process. + * An example is a list of HTML elements which are self-closing, which is needed when parsing, transforming, and compiling HTML. + * + * @returns key-value store object + */ + data(): {[key: string]: unknown} + + /** + * @param key Identifier + * @returns If getting, the value at key + */ + data(key: string): unknown + + /** + * @param value Value to set. Omit if getting key + * @returns If setting, the processor on which data is invoked + */ + data(key: string, value: any): Processor<P> + + /** + * Freeze a processor. Frozen processors are meant to be extended and not to be configured or processed directly. + * + * Once a processor is frozen it cannot be unfrozen. New processors functioning just like it can be created by invoking the processor. + * + * It’s possible to freeze processors explicitly, by calling `.freeze()`, but `.parse()`, `.run()`, `.stringify()`, and `.process()` call `.freeze()` to freeze a processor too. + * + * @returns The processor on which freeze is invoked. 
+ */ + freeze(): FrozenProcessor<P> + } + + /** + * A Plugin (Attacher) is the thing passed to `use`. + * It configures the processor and in turn can receive options. + * + * Attachers can configure processors, such as by interacting with parsers and compilers, linking them to other processors, or by specifying how the syntax tree is handled. + * + * @param settings Configuration + * @typeParam S Plugin settings + * @typeParam P Processor settings + * @returns Optional Transformer. + */ + type Plugin<S extends any[] = [Settings?], P = Settings> = Attacher<S, P> + + /** + * Configuration passed to a Plugin or Processor + */ + interface Settings { + [key: string]: unknown + } + + /** + * Presets provide a potentially sharable way to configure processors. + * They can contain multiple plugins and optionally settings as well. + * + * @typeParam P Processor settings + */ + interface Preset<S = Settings, P = Settings> { + plugins: PluggableList<P> + settings?: Settings + } + + /** + * Settings can be passed directly to the processor + * + * @typeParam P Settings applied to a processor. Useful when packaging unified with a preset parser and compiler. + */ + interface ProcessorSettings<P = Settings> { + settings: P + } + + /** + * A pairing of a plugin with its settings + * + * @typeParam S Plugin settings + * @typeParam P Processor settings + */ + type PluginTuple<S extends any[] = [Settings?], P = Settings> = [ + Plugin<S, P>, + ...S + ] + + /** + * A union of the different ways to add plugins to unified + * + * @typeParam S Plugin settings + * @typeParam P Processor settings + */ + type Pluggable<S extends any[] = [Settings?], P = Settings> = + | Plugin<S, P> + | Preset<S, P> + | PluginTuple<S, P> + + /** + * A list of plugins and presets + * + * @typeParam P Processor settings + */ + type PluggableList<P = Settings> = Array<Pluggable<any[], P>> + + /** + * An attacher is the thing passed to `use`. + * It configures the processor and in turn can receive options. + * + * Attachers can configure processors, such as by interacting with parsers and compilers, linking them to other processors, or by specifying how the syntax tree is handled. + * + * @param settings Configuration + * @typeParam S Plugin settings + * @typeParam P Processor settings + * @returns Optional Transformer. + */ + type Attacher<S extends any[] = [Settings?], P = Settings> = ( + this: Processor<P>, + ...settings: S + ) => Transformer | void + + /** + * Transformers modify the syntax tree or metadata of a file. A transformer is a function which is invoked each time a file is passed through the transform phase. + * If an error occurs (either because it’s thrown, returned, rejected, or passed to `next`), the process stops. + * + * The transformation process in unified is handled by `trough`, see it’s documentation for the exact semantics of transformers. + * + * @param node Node or tree to be transformed + * @param file File associated with node or tree + * @param next If the signature of a transformer includes `next` (third argument), the function may finish asynchronous, and must invoke `next()`. + * @returns + * - `void` — If nothing is returned, the next transformer keeps using same tree. 
+ * - `Error` — Can be returned to stop the process + * - `Node` — Can be returned and results in further transformations and `stringify`s to be performed on the new tree + * - `Promise` — If a promise is returned, the function is asynchronous, and must be resolved (optionally with a `Node`) or rejected (optionally with an `Error`) + */ + type Transformer = ( + node: Node, + file: VFile, + next?: ( + error: Error | null, + tree: Node, + file: VFile + ) => Record<string, unknown> + ) => Error | Node | Promise<Node> | void | Promise<void> + + /** + * Transform file contents into an AST + */ + interface Parser { + /** + * Transform file contents into an AST + * + * @returns Parsed AST node/tree + */ + parse(): Node + } + + /** + * A constructor function (a function with keys in its `prototype`) or class that implements a + * `parse` method. + */ + type ParserConstructor = new (text: string, file: VFile) => Parser + + /** + * Transform file contents into an AST + * + * @param text Text to transform into AST node(s) + * @param file File associated with text + * @returns Parsed AST node/tree + */ + type ParserFunction = (text: string, file: VFile) => Node + + /** + * Transform an AST node/tree into text + */ + interface Compiler { + /** + * Transform an AST node/tree into text + * + * @returns Compiled text + */ + compile(): string + } + + /** + * A constructor function (a function with keys in its `prototype`) or class that implements a + * `compile` method. + */ + type CompilerConstructor = new (node: Node, file: VFile) => Compiler + + /** + * Transform an AST node/tree into text + * + * @param node Node/tree to be stringified + * @param file File associated with node + * @returns Compiled text + */ + type CompilerFunction = (node: Node, file: VFile) => string + + /** + * Access results from transforms + * + * @param error Error if any occurred + * @param node Transformed AST tree/node + * @param vfile File associated with node + */ + type RunCallback = (error: Error | null, node: Node, file: VFile) => void + + /** + * Access results from transforms + * + * @param error Error if any occurred + * @param vfile File with updated content + */ + type ProcessCallback = (error: Error | null, file: VFile) => void +} + +/** + * Unified processor allows plugins, parsers, and compilers to be chained together to transform content. + * + * @typeParam P Processor settings. Useful when packaging unified with a preset parser and compiler. + */ +declare function unified<P = unified.Settings>(): unified.Processor<P> +export = unified diff --git a/node_modules/unist-util-is/convert.d.ts b/node_modules/unist-util-is/convert.d.ts new file mode 100644 index 00000000..96b64df3 --- /dev/null +++ b/node_modules/unist-util-is/convert.d.ts @@ -0,0 +1,6 @@ +import {Test, TestFunction} from '.' +import {Node} from 'unist' + +declare function convert<T extends Node>(test: Test<T>): TestFunction<T> + +export = convert diff --git a/node_modules/unist-util-is/convert.js b/node_modules/unist-util-is/convert.js index f92f34f1..938ff36d 100644 --- a/node_modules/unist-util-is/convert.js +++ b/node_modules/unist-util-is/convert.js @@ -3,16 +3,16 @@ module.exports = convert function convert(test) { - if (typeof test === 'string') { - return typeFactory(test) + if (test == null) { + return ok } - if (test === null || test === undefined) { - return ok + if (typeof test === 'string') { + return typeFactory(test) } if (typeof test === 'object') { - return ('length' in test ? 
anyFactory : matchesFactory)(test) + return 'length' in test ? anyFactory(test) : allFactory(test) } if (typeof test === 'function') { @@ -22,30 +22,16 @@ function convert(test) { throw new Error('Expected function, string, or object as test') } -function convertAll(tests) { - var results = [] - var length = tests.length - var index = -1 - - while (++index < length) { - results[index] = convert(tests[index]) - } - - return results -} - // Utility assert each property in `test` is represented in `node`, and each // values are strictly equal. -function matchesFactory(test) { - return matches +function allFactory(test) { + return all - function matches(node) { + function all(node) { var key for (key in test) { - if (node[key] !== test[key]) { - return false - } + if (node[key] !== test[key]) return false } return true @@ -53,15 +39,19 @@ function matchesFactory(test) { } function anyFactory(tests) { - var checks = convertAll(tests) - var length = checks.length + var checks = [] + var index = -1 + + while (++index < tests.length) { + checks[index] = convert(tests[index]) + } - return matches + return any - function matches() { + function any() { var index = -1 - while (++index < length) { + while (++index < checks.length) { if (checks[index].apply(this, arguments)) { return true } diff --git a/node_modules/unist-util-is/index.d.ts b/node_modules/unist-util-is/index.d.ts new file mode 100644 index 00000000..0cea73f9 --- /dev/null +++ b/node_modules/unist-util-is/index.d.ts @@ -0,0 +1,71 @@ +// TypeScript Version: 3.5 + +import {Node, Parent} from 'unist' + +declare namespace unistUtilIs { + /** + * Check that type property matches expectation for a node + * + * @typeParam T type of node that passes test + */ + type TestType<T extends Node> = T['type'] + + /** + * Check that some attributes on a node are matched + * + * @typeParam T type of node that passes test + */ + type TestObject<T extends Node> = Partial<T> + + /** + * Check if a node passes a test + * + * @param node node to check + * @param index index of node in parent + * @param parent parent of node + * @typeParam T type of node that passes test + * @returns true if type T is found, false otherwise + */ + type TestFunction<T extends Node> = ( + node: unknown, + index?: number, + parent?: Parent + ) => node is T + + /** + * Union of all the types of tests + * + * @typeParam T type of node that passes test + */ + type Test<T extends Node> = + | TestType<T> + | TestObject<T> + | TestFunction<T> + | null + | undefined +} + +/** + * Unist utility to check if a node passes a test. + * + * @param node Node to check. + * @param test When nullish, checks if `node` is a `Node`. + * When `string`, works like passing `function (node) {return node.type === test}`. + * When `function` checks if function passed the node is true. + * When `object`, checks that all keys in test are in node, and that they have (strictly) equal values. + * When `array`, checks any one of the subtests pass. + * @param index Position of `node` in `parent` + * @param parent Parent of `node` + * @param context Context object to invoke `test` with + * @typeParam T type that node is compared with + * @returns Whether test passed and `node` is a `Node` (object with `type` set to non-empty `string`). 
+ */ +declare function unistUtilIs<T extends Node>( + node: unknown, + test?: unistUtilIs.Test<T> | Array<unistUtilIs.Test<any>>, + index?: number, + parent?: Parent, + context?: any +): node is T + +export = unistUtilIs diff --git a/node_modules/unist-util-is/index.js b/node_modules/unist-util-is/index.js index f18d416e..a8e15786 100644 --- a/node_modules/unist-util-is/index.js +++ b/node_modules/unist-util-is/index.js @@ -8,30 +8,25 @@ is.convert = convert // Assert if `test` passes for `node`. // When a `parent` node is known the `index` of node should also be given. -// eslint-disable-next-line max-params function is(node, test, index, parent, context) { - var hasParent = parent !== null && parent !== undefined - var hasIndex = index !== null && index !== undefined var check = convert(test) if ( - hasIndex && + index != null && (typeof index !== 'number' || index < 0 || index === Infinity) ) { - throw new Error('Expected positive finite index or child node') + throw new Error('Expected positive finite index') } - if (hasParent && (!is(parent) || !parent.children)) { + if (parent != null && (!is(parent) || !parent.children)) { throw new Error('Expected parent node') } - if (!node || !node.type || typeof node.type !== 'string') { - return false - } - - if (hasParent !== hasIndex) { + if ((parent == null) !== (index == null)) { throw new Error('Expected both parent and index') } - return Boolean(check.call(context, node, index, parent)) + return node && node.type && typeof node.type === 'string' + ? Boolean(check.call(context, node, index, parent)) + : false } diff --git a/node_modules/unist-util-is/package.json b/node_modules/unist-util-is/package.json index 25193acd..2064685c 100644 --- a/node_modules/unist-util-is/package.json +++ b/node_modules/unist-util-is/package.json @@ -1,47 +1,64 @@ { "name": "unist-util-is", - "version": "3.0.0", - "description": "Utility to check if a node passes a test", + "version": "4.1.0", + "description": "unist utility to check if a node passes a test", "license": "MIT", "keywords": [ "unist", + "unist-util", + "util", + "utility", + "tree", "node", "is", "equal", + "check", "test", - "type", - "util", - "utility" + "type" ], "repository": "syntax-tree/unist-util-is", "bugs": "https://github.com/syntax-tree/unist-util-is/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", + "Christian Murphy <christian.murphy.42@gmail.com>", + "Lucas Brandstaetter <lucas@brandstaetter.tech> (https://github.com/Roang-zero1)" ], "files": [ "index.js", - "convert.js" + "convert.js", + "index.d.ts", + "convert.d.ts" ], - "dependencies": {}, + "types": "index.d.ts", "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^14.0.0", - "prettier": "^1.0.0", - "remark-cli": "^6.0.0", - "remark-preset-wooorm": "^5.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "xo": "^0.24.0" + "@types/mdast": "^3.0.0", + "browserify": "^17.0.0", + "dtslint": "^4.0.0", + "fast-check": "^2.0.0", + "lodash": "^4.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "unified": "^9.0.0", + "xo": "^0.38.0" }, "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . 
-s unistUtilIs > unist-util-is.js", - "build-mangle": "browserify . -s unistUtilIs -p tinyify > unist-util-is.min.js", + "format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix", + "build-bundle": "browserify . -s unistUtilIs -o unist-util-is.js", + "build-mangle": "browserify . -s unistUtilIs -o unist-util-is.min.js -p tinyify", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" + "test-coverage": "nyc --reporter lcov tape test", + "test-types": "dtslint .", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "prettier": { "tabWidth": 2, @@ -55,9 +72,22 @@ "prettier": true, "esnext": false, "rules": { + "@typescript-eslint/no-unused-expressions": "off", + "eqeqeq": [ + "error", + "always", + { + "null": "ignore" + } + ], + "max-params": "off", + "no-eq-null": "off", + "unicorn/prefer-number-properties": "off", + "unicorn/prefer-reflect-apply": "off", "unicorn/prefer-type-error": "off" }, "ignore": [ + "*.ts", "unist-util-is.js" ] }, diff --git a/node_modules/unist-util-is/readme.md b/node_modules/unist-util-is/readme.md index 7d53629a..a2097627 100644 --- a/node_modules/unist-util-is/readme.md +++ b/node_modules/unist-util-is/readme.md @@ -18,7 +18,7 @@ npm install unist-util-is ``` -## Usage +## Use ```js var is = require('unist-util-is') @@ -53,7 +53,7 @@ is(node, test, 5, parent) // => true * `node` ([`Node`][node]) — Node to check. * `test` ([`Function`][test], `string`, `Object`, or `Array.<Test>`, optional) - — When not given, checks if `node` is a [`Node`][node]. + — When nullish, checks if `node` is a [`Node`][node]. When `string`, works like passing `node => node.type === test`. When `array`, checks if any one of the subtests pass. When `object`, checks that all keys in `test` are in `node`, @@ -90,6 +90,9 @@ Create a test function from `test`, that can later be called with a `node`, Useful if you’re going to test many nodes, for example when creating a utility where something else passes an is-compatible test. +The created function is slightly faster because it expects valid input only. +Therefore, passing invalid input, yields unexpected results. + Can also be accessed with `require('unist-util-is/convert')`. For example: @@ -115,7 +118,7 @@ console.log(leafs) Yields: ```js -[({type: 'leaf', value: '2'}, {type: 'leaf', value: '5'})] +[{type: 'leaf', value: '2'}, {type: 'leaf', value: '5'}] ``` ## Related @@ -130,11 +133,9 @@ Yields: — Find all nodes before another node * [`unist-util-find-all-between`](https://github.com/mrzmmr/unist-util-find-all-between) — Find all nodes between two nodes -* [`unist-util-find`](https://github.com/blahah/unist-util-find) - — Find nodes matching a predicate -* [`unist-util-filter`](https://github.com/eush77/unist-util-filter) +* [`unist-util-filter`](https://github.com/syntax-tree/unist-util-filter) — Create a new tree with nodes that pass a check -* [`unist-util-remove`](https://github.com/eush77/unist-util-remove) +* [`unist-util-remove`](https://github.com/syntax-tree/unist-util-remove) — Remove nodes from tree ## Contribute @@ -143,8 +144,8 @@ See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get started. See [`support.md`][support] for ways to get help. -This project has a [Code of Conduct][coc]. 
-By interacting with this repository, organisation, or community you agree to +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to abide by its terms. ## License @@ -153,9 +154,9 @@ abide by its terms. <!-- Definitions --> -[build-badge]: https://img.shields.io/travis/syntax-tree/unist-util-is.svg +[build-badge]: https://github.com/syntax-tree/unist-util-is/workflows/main/badge.svg -[build]: https://travis-ci.org/syntax-tree/unist-util-is +[build]: https://github.com/syntax-tree/unist-util-is/actions [coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/unist-util-is.svg @@ -175,9 +176,9 @@ abide by its terms. [collective]: https://opencollective.com/unified -[chat-badge]: https://img.shields.io/badge/join%20the%20community-on%20spectrum-7b16ff.svg +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg -[chat]: https://spectrum.chat/unified/syntax-tree +[chat]: https://github.com/syntax-tree/unist/discussions [npm]: https://docs.npmjs.com/cli/install @@ -185,11 +186,11 @@ abide by its terms. [author]: https://wooorm.com -[contributing]: https://github.com/syntax-tree/.github/blob/master/contributing.md +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md -[support]: https://github.com/syntax-tree/.github/blob/master/support.md +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md -[coc]: https://github.com/syntax-tree/.github/blob/master/code-of-conduct.md +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md [unist]: https://github.com/syntax-tree/unist diff --git a/node_modules/unist-util-remove-position/index.js b/node_modules/unist-util-remove-position/index.js deleted file mode 100644 index 09639598..00000000 --- a/node_modules/unist-util-remove-position/index.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -var visit = require('unist-util-visit') - -module.exports = removePosition - -function removePosition(node, force) { - visit(node, force ? hard : soft) - return node -} - -function hard(node) { - delete node.position -} - -function soft(node) { - node.position = undefined -} diff --git a/node_modules/unist-util-remove-position/package.json b/node_modules/unist-util-remove-position/package.json deleted file mode 100644 index e1166471..00000000 --- a/node_modules/unist-util-remove-position/package.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "name": "unist-util-remove-position", - "version": "1.1.4", - "description": "Remove `position`s from a unist tree", - "license": "MIT", - "keywords": [ - "unist", - "utility", - "remove", - "position", - "location" - ], - "repository": "syntax-tree/unist-util-remove-position", - "bugs": "https://github.com/syntax-tree/unist-util-remove-position/issues", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", - "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" - ], - "files": [ - "index.js" - ], - "dependencies": { - "unist-util-visit": "^1.1.0" - }, - "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^14.0.0", - "prettier": "^1.0.0", - "remark": "^11.0.0", - "remark-cli": "^7.0.0", - "remark-preset-wooorm": "^6.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "unist-builder": "^2.0.0", - "xo": "^0.25.0" - }, - "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . 
-s unistUtilRemovePosition > unist-util-remove-position.js", - "build-mangle": "browserify . -s unistUtilRemovePosition -p tinyify > unist-util-remove-position.min.js", - "build": "npm run build-bundle && npm run build-mangle", - "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" - }, - "prettier": { - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "bracketSpacing": false, - "semi": false, - "trailingComma": "none" - }, - "xo": { - "prettier": true, - "esnext": false, - "ignores": [ - "unist-util-remove-position.js" - ] - }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" - ] - } -} diff --git a/node_modules/unist-util-remove-position/readme.md b/node_modules/unist-util-remove-position/readme.md deleted file mode 100644 index e79ed14b..00000000 --- a/node_modules/unist-util-remove-position/readme.md +++ /dev/null @@ -1,131 +0,0 @@ -# unist-util-remove-position - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] -[![Sponsors][sponsors-badge]][collective] -[![Backers][backers-badge]][collective] -[![Chat][chat-badge]][chat] - -[**unist**][unist] utility to remove [`position`][position]s from tree. - -## Install - -[npm][]: - -```sh -npm install unist-util-remove-position -``` - -## Usage - -```js -var remark = require('remark') -var removePosition = require('unist-util-remove-position') - -var tree = remark().parse('Some _emphasis_, **importance**, and `code`.') - -removePosition(tree, true) - -console.dir(tree, {depth: null}) -``` - -Yields: - -```js -{ - type: 'root', - children: [ - { - type: 'paragraph', - children: [ - { type: 'text', value: 'Some ' }, - { - type: 'emphasis', - children: [ { type: 'text', value: 'emphasis' } ] - }, - { type: 'text', value: ', ' }, - { - type: 'strong', - children: [ { type: 'text', value: 'importance' } ] - }, - { type: 'text', value: ', and ' }, - { type: 'inlineCode', value: 'code' }, - { type: 'text', value: '.' } - ] - } - ] -} -``` - -## API - -### `removePosition(node[, force])` - -Remove [`position`][position]s from [`node`][node]. -If `force` is given, uses `delete`, otherwise, sets `position`s to `undefined`. - -###### Returns - -The given `node`. - -## Contribute - -See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get -started. -See [`support.md`][support] for ways to get help. - -This project has a [Code of Conduct][coc]. -By interacting with this repository, organisation, or community you agree to -abide by its terms. 
- -## License - -[MIT][license] © [Titus Wormer][author] - -<!-- Definitions --> - -[build-badge]: https://img.shields.io/travis/syntax-tree/unist-util-remove-position.svg - -[build]: https://travis-ci.org/syntax-tree/unist-util-remove-position - -[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/unist-util-remove-position.svg - -[coverage]: https://codecov.io/github/syntax-tree/unist-util-remove-position - -[downloads-badge]: https://img.shields.io/npm/dm/unist-util-remove-position.svg - -[downloads]: https://www.npmjs.com/package/unist-util-remove-position - -[size-badge]: https://img.shields.io/bundlephobia/minzip/unist-util-remove-position.svg - -[size]: https://bundlephobia.com/result?p=unist-util-remove-position - -[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg - -[backers-badge]: https://opencollective.com/unified/backers/badge.svg - -[collective]: https://opencollective.com/unified - -[chat-badge]: https://img.shields.io/badge/join%20the%20community-on%20spectrum-7b16ff.svg - -[chat]: https://spectrum.chat/unified/syntax-tree - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com - -[contributing]: https://github.com/syntax-tree/.github/blob/master/contributing.md - -[support]: https://github.com/syntax-tree/.github/blob/master/support.md - -[coc]: https://github.com/syntax-tree/.github/blob/master/code-of-conduct.md - -[unist]: https://github.com/syntax-tree/unist - -[position]: https://github.com/syntax-tree/unist#position - -[node]: https://github.com/syntax-tree/unist#node diff --git a/node_modules/unist-util-stringify-position/index.js b/node_modules/unist-util-stringify-position/index.js index 3be1e142..3d78a444 100644 --- a/node_modules/unist-util-stringify-position/index.js +++ b/node_modules/unist-util-stringify-position/index.js @@ -5,28 +5,28 @@ var own = {}.hasOwnProperty module.exports = stringify function stringify(value) { - /* Nothing. */ + // Nothing. if (!value || typeof value !== 'object') { - return null + return '' } - /* Node. */ + // Node. if (own.call(value, 'position') || own.call(value, 'type')) { return position(value.position) } - /* Position. */ + // Position. if (own.call(value, 'start') || own.call(value, 'end')) { return position(value) } - /* Point. */ + // Point. if (own.call(value, 'line') || own.call(value, 'column')) { return point(value) } - /* ? */ - return null + // ? 
+ return '' } function point(point) { diff --git a/node_modules/unist-util-stringify-position/package.json b/node_modules/unist-util-stringify-position/package.json index 2e20b672..0f35015d 100644 --- a/node_modules/unist-util-stringify-position/package.json +++ b/node_modules/unist-util-stringify-position/package.json @@ -1,47 +1,59 @@ { "name": "unist-util-stringify-position", - "version": "1.1.2", - "description": "Stringify a Unist node, position, or point", + "version": "2.0.3", + "description": "unist utility to serialize a node, position, or point as a human readable location", "license": "MIT", "keywords": [ "unist", + "unist-util", + "util", + "utility", "position", "location", "point", "node", "stringify", - "tostring", - "util", - "utility" + "tostring" ], "repository": "syntax-tree/unist-util-stringify-position", "bugs": "https://github.com/syntax-tree/unist-util-stringify-position/issues", - "author": "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)" + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" ], + "types": "types/index.d.ts", "files": [ + "types/index.d.ts", "index.js" ], - "dependencies": {}, + "dependencies": { + "@types/unist": "^2.0.2" + }, "devDependencies": { "browserify": "^16.0.0", - "esmangle": "^1.0.0", - "nyc": "^11.0.0", - "prettier": "^1.12.1", - "remark-cli": "^5.0.0", - "remark-preset-wooorm": "^4.0.0", - "tape": "^4.5.1", - "xo": "^0.20.0" + "dtslint": "^3.0.0", + "nyc": "^15.0.0", + "prettier": "^1.0.0", + "remark-cli": "^7.0.0", + "remark-preset-wooorm": "^6.0.0", + "tape": "^4.0.0", + "tinyify": "^2.0.0", + "typescript": "^3.0.0", + "xo": "^0.27.0" }, "scripts": { - "format": "remark . -qfo && prettier --write '**/*.js' && xo --fix", - "build-bundle": "browserify index.js --no-builtins -s unistUtilStringifyPosition > unist-util-stringify-position.js", - "build-mangle": "esmangle unist-util-stringify-position.js > unist-util-stringify-position.min.js", + "format": "remark . -qfo && prettier --write \"**/*.{js,ts}\" && xo --fix", + "build-bundle": "browserify . -s unistUtilStringifyPosition > unist-util-stringify-position.js", + "build-mangle": "browserify . 
-s unistUtilStringifyPosition -p tinyify > unist-util-stringify-position.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "nyc": { "check-coverage": true, @@ -60,11 +72,6 @@ "xo": { "prettier": true, "esnext": false, - "rules": { - "guard-for-in": "off", - "no-var": "off", - "prefer-arrow-callback": "off" - }, "ignores": [ "unist-util-stringify-position.js" ] diff --git a/node_modules/unist-util-stringify-position/readme.md b/node_modules/unist-util-stringify-position/readme.md index 85c753b5..bb565149 100644 --- a/node_modules/unist-util-stringify-position/readme.md +++ b/node_modules/unist-util-stringify-position/readme.md @@ -1,28 +1,33 @@ -# unist-util-stringify-position [![Build Status][build-badge]][build-page] [![Coverage Status][coverage-badge]][coverage-page] +# unist-util-stringify-position -Stringify a [**Unist**][unist] [`Position`][position] or [`Point`][point]. +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] -## Installation +[**unist**][unist] utility to pretty print the positional information of a node. + +## Install [npm][]: -```bash +```sh npm install unist-util-stringify-position ``` -## Usage +## Use -```javascript +```js var stringify = require('unist-util-stringify-position') // Point stringify({line: 2, column: 3}) // => '2:3' // Position -stringify({ - start: {line: 2}, - end: {line: 3} -}) // => '2:1-3:1' +stringify({start: {line: 2}, end: {line: 3}}) // => '2:1-3:1' // Node stringify({ @@ -39,8 +44,8 @@ stringify({ ### `stringifyPosition(node|position|point)` -Stringify one point, a position (start and end points), or -a node’s position. +Stringify one [point][], a [position][] (start and end [point][]s), or a node’s +[positional information][positional-information]. ###### Parameters @@ -53,19 +58,32 @@ a node’s position. ###### Returns -`string?` — A range `ls:cs-le:ce` (when given `node` or -`position`) or a point `l:c` (when given `point`), where `l` stands -for line, `c` for column, `s` for `start`, and `e` for -end. `null` is returned if the given value is neither `node`, +`string?` — A range `ls:cs-le:ce` (when given `node` or `position`) or a point +`l:c` (when given `point`), where `l` stands for line, `c` for column, `s` for +`start`, and `e` for end. +An empty string (`''`) is returned if the given value is neither `node`, `position`, nor `point`. +## Related + +* [`unist-util-generated`](https://github.com/syntax-tree/unist-util-generated) + — Check if a node is generated +* [`unist-util-position`](https://github.com/syntax-tree/unist-util-position) + — Get positional info of nodes +* [`unist-util-remove-position`](https://github.com/syntax-tree/unist-util-remove-position) + — Remove positional info from trees +* [`unist-util-source`](https://github.com/syntax-tree/unist-util-source) + — Get the source of a value (node or position) in a file + ## Contribute -See [`contributing.md` in `syntax-tree/unist`][contributing] for ways to get +See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get started. 
+See [`support.md`][support] for ways to get help. -This organisation has a [Code of Conduct][coc]. By interacting with this -repository, organisation, or community you agree to abide by its terms. +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. ## License @@ -75,17 +93,41 @@ repository, organisation, or community you agree to abide by its terms. [build-badge]: https://img.shields.io/travis/syntax-tree/unist-util-stringify-position.svg -[build-page]: https://travis-ci.org/syntax-tree/unist-util-stringify-position +[build]: https://travis-ci.org/syntax-tree/unist-util-stringify-position [coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/unist-util-stringify-position.svg -[coverage-page]: https://codecov.io/github/syntax-tree/unist-util-stringify-position?branch=master +[coverage]: https://codecov.io/github/syntax-tree/unist-util-stringify-position + +[downloads-badge]: https://img.shields.io/npm/dm/unist-util-stringify-position.svg + +[downloads]: https://www.npmjs.com/package/unist-util-stringify-position + +[size-badge]: https://img.shields.io/bundlephobia/minzip/unist-util-stringify-position.svg + +[size]: https://bundlephobia.com/result?p=unist-util-stringify-position + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-spectrum-7b16ff.svg + +[chat]: https://spectrum.chat/unified/syntax-tree [npm]: https://docs.npmjs.com/cli/install -[license]: LICENSE +[license]: license -[author]: http://wooorm.com +[author]: https://wooorm.com + +[contributing]: https://github.com/syntax-tree/.github/blob/master/contributing.md + +[support]: https://github.com/syntax-tree/.github/blob/master/support.md + +[coc]: https://github.com/syntax-tree/.github/blob/master/code-of-conduct.md [unist]: https://github.com/syntax-tree/unist @@ -95,6 +137,4 @@ repository, organisation, or community you agree to abide by its terms. 
[point]: https://github.com/syntax-tree/unist#point -[contributing]: https://github.com/syntax-tree/unist/blob/master/contributing.md - -[coc]: https://github.com/syntax-tree/unist/blob/master/code-of-conduct.md +[positional-information]: https://github.com/syntax-tree/unist#positional-information diff --git a/node_modules/unist-util-stringify-position/types/index.d.ts b/node_modules/unist-util-stringify-position/types/index.d.ts new file mode 100644 index 00000000..8f31bc04 --- /dev/null +++ b/node_modules/unist-util-stringify-position/types/index.d.ts @@ -0,0 +1,9 @@ +// TypeScript Version: 3.0 + +import * as Unist from 'unist' + +declare function unistUtilStringifyPosition( + value: Unist.Node | Unist.Position | Unist.Point +): string + +export = unistUtilStringifyPosition diff --git a/node_modules/unist-util-visit-parents/color.browser.js b/node_modules/unist-util-visit-parents/color.browser.js new file mode 100644 index 00000000..db22a1e6 --- /dev/null +++ b/node_modules/unist-util-visit-parents/color.browser.js @@ -0,0 +1,4 @@ +module.exports = identity +function identity(d) { + return d +} diff --git a/node_modules/unist-util-visit-parents/color.js b/node_modules/unist-util-visit-parents/color.js new file mode 100644 index 00000000..2e2974c9 --- /dev/null +++ b/node_modules/unist-util-visit-parents/color.js @@ -0,0 +1,4 @@ +module.exports = color +function color(d) { + return '\u001B[33m' + d + '\u001B[39m' +} diff --git a/node_modules/unist-util-visit-parents/index.js b/node_modules/unist-util-visit-parents/index.js index c7263592..c24b8f5f 100644 --- a/node_modules/unist-util-visit-parents/index.js +++ b/node_modules/unist-util-visit-parents/index.js @@ -3,6 +3,7 @@ module.exports = visitParents var convert = require('unist-util-is/convert') +var color = require('./color') var CONTINUE = true var SKIP = 'skip' @@ -13,6 +14,7 @@ visitParents.SKIP = SKIP visitParents.EXIT = EXIT function visitParents(tree, test, visitor, reverse) { + var step var is if (typeof test === 'function' && typeof visitor !== 'function') { @@ -22,45 +24,58 @@ function visitParents(tree, test, visitor, reverse) { } is = convert(test) + step = reverse ? -1 : 1 - one(tree, null, []) + factory(tree, null, [])() - // Visit a single node. - function one(node, index, parents) { - var result = [] - var subresult + function factory(node, index, parents) { + var value = typeof node === 'object' && node !== null ? node : {} + var name - if (!test || is(node, index, parents[parents.length - 1] || null)) { - result = toResult(visitor(node, parents)) + if (typeof value.type === 'string') { + name = + typeof value.tagName === 'string' + ? value.tagName + : typeof value.name === 'string' + ? value.name + : undefined - if (result[0] === EXIT) { - return result - } + visit.displayName = + 'node (' + color(value.type + (name ? '<' + name + '>' : '')) + ')' } - if (node.children && result[0] !== SKIP) { - subresult = toResult(all(node.children, parents.concat(node))) - return subresult[0] === EXIT ? subresult : result - } + return visit - return result - } + function visit() { + var grandparents = parents.concat(node) + var result = [] + var subresult + var offset + + if (!test || is(node, index, parents[parents.length - 1] || null)) { + result = toResult(visitor(node, parents)) + + if (result[0] === EXIT) { + return result + } + } + + if (node.children && result[0] !== SKIP) { + offset = (reverse ? node.children.length : -1) + step - // Visit children in `parent`. 
- function all(children, parents) { - var min = -1 - var step = reverse ? -1 : 1 - var index = (reverse ? children.length : min) + step - var result + while (offset > -1 && offset < node.children.length) { + subresult = factory(node.children[offset], offset, grandparents)() - while (index > min && index < children.length) { - result = one(children[index], index, parents) + if (subresult[0] === EXIT) { + return subresult + } - if (result[0] === EXIT) { - return result + offset = + typeof subresult[1] === 'number' ? subresult[1] : offset + step + } } - index = typeof result[1] === 'number' ? result[1] : index + step + return result } } } diff --git a/node_modules/unist-util-visit-parents/package.json b/node_modules/unist-util-visit-parents/package.json index 26e18720..85d2c517 100644 --- a/node_modules/unist-util-visit-parents/package.json +++ b/node_modules/unist-util-visit-parents/package.json @@ -1,45 +1,74 @@ { "name": "unist-util-visit-parents", - "version": "2.1.2", - "description": "Recursively walk over unist nodes, with ancestral information", + "version": "3.1.1", + "description": "unist utility to recursively walk over nodes, with ancestral information", "license": "MIT", "keywords": [ "unist", - "walk", + "unist-util", "util", - "utility" + "utility", + "tree", + "ast", + "visit", + "traverse", + "walk", + "check", + "parent", + "parents" ], "repository": "syntax-tree/unist-util-visit-parents", "bugs": "https://github.com/syntax-tree/unist-util-visit-parents/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" ], + "browser": { + "./color.js": "./color.browser.js" + }, + "react-native": { + "./color.js": "./color.browser.js" + }, "files": [ - "index.js" + "index.js", + "color.js", + "color.browser.js", + "types/index.d.ts" ], + "types": "types/index.d.ts", "dependencies": { - "unist-util-is": "^3.0.0" + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0" }, "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^14.0.0", - "prettier": "^1.0.0", - "remark": "^10.0.0", - "remark-cli": "^6.0.0", - "remark-preset-wooorm": "^5.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "xo": "^0.24.0" + "browserify": "^17.0.0", + "dtslint": "^4.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark": "^13.0.0", + "remark-cli": "^9.0.0", + "remark-gfm": "^1.0.0", + "remark-preset-wooorm": "^8.0.0", + "strip-ansi": "^6.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "typescript": "^4.0.0", + "unified": "^9.0.0", + "xo": "^0.34.0" }, "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", + "format": "remark . -qfo && prettier . 
--write && xo --fix", "build-bundle": "browserify index.js -s unistUtilVisitParents > unist-util-visit-parents.js", "build-mangle": "browserify index.js -s unistUtilVisitParents -p tinyify > unist-util-visit-parents.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "nyc": { "check-coverage": true, @@ -58,7 +87,12 @@ "xo": { "prettier": true, "esnext": false, + "rules": { + "unicorn/prefer-set-has": "off", + "unicorn/prefer-reflect-apply": "off" + }, "ignores": [ + "types/", "unist-util-visit-parents.js" ] }, diff --git a/node_modules/unist-util-visit-parents/readme.md b/node_modules/unist-util-visit-parents/readme.md index ec7efc7b..72417534 100644 --- a/node_modules/unist-util-visit-parents/readme.md +++ b/node_modules/unist-util-visit-parents/readme.md @@ -18,7 +18,7 @@ npm install unist-util-visit-parents ``` -## Usage +## Use ```js var remark = require('remark') @@ -52,8 +52,20 @@ Yields: ### `visit(tree[, test], visitor[, reverse])` -Visit nodes ([**inclusive descendants**][descendant] of [`tree`][tree]), with -ancestral information. Optionally filtering nodes. Optionally in reverse. +Visit nodes ([*inclusive descendants*][descendant] of [`tree`][tree]), with +ancestral information. +Optionally filtering nodes. +Optionally in reverse. + +This algorithm performs [*depth-first*][depth-first] +[*tree traversal*][tree-traversal] in [*preorder*][preorder] (**NLR**), or +if `reverse` is given, in *reverse preorder* (**NRL**). + +Walking the tree is an intensive task. +Make use of the return values of the visitor when possible. +Instead of walking a tree multiple times with different `test`s, walk it once +without a test, and use [`unist-util-is`][is] to check if a node matches a test, +and then perform different operations. ###### Parameters @@ -62,10 +74,10 @@ ancestral information. Optionally filtering nodes. Optionally in reverse. [type][]) * `visitor` ([Function][visitor]) — Function invoked when a node is found that passes `test` -* `reverse` (`boolean`, default: `false`) — The tree is walked in [preorder][] - (NLR), visiting the node itself, then its [head][], etc. - When `reverse` is passed, the tree is stilled walked in preorder, but now - in NRL (the node itself, then its [tail][], etc.) +* `reverse` (`boolean`, default: `false`) — The tree is traversed in + [preorder][] (NLR), visiting the node itself, then its [head][], etc. + When `reverse` is passed, the tree is traversed in reverse preorder (NRL): + the node itself is visited, then its [tail][], etc. #### `next? = visitor(node, ancestors)` @@ -80,7 +92,7 @@ If adding or removing previous [sibling][]s (or next siblings, in case of to specify the sibling to traverse after `node` is traversed. Adding or removing next siblings of `node` (or previous siblings, in case of reverse) is handled as expected without needing to return a new `index`. -Removing the `children` property of parent still results in them being +Removing the `children` property of an ancestor still results in them being traversed. 
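A minimal sketch of the return values documented above, assuming `unist-util-visit-parents@3` (the version added by this patch) and `unist-builder` only to build a throwaway tree; this is an illustration, not the package's own example:

```js
var u = require('unist-builder')
var visitParents = require('unist-util-visit-parents')

var tree = u('tree', [
  u('node', [u('leaf', '1')]),
  u('node', [u('leaf', '2')]),
  u('leaf', '3')
])

// `ancestors` is the stack of parents of the matched node.
// Returning SKIP keeps walking but does not descend into its children.
visitParents(tree, 'node', function (node, ancestors) {
  console.log(node.type, ancestors.length)
  return visitParents.SKIP
})

// Returning EXIT stops the entire traversal immediately.
visitParents(tree, 'leaf', function (node) {
  if (node.value === '2') {
    return visitParents.EXIT
  }
})
```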
###### Parameters @@ -123,16 +135,15 @@ traversing the parent * [`unist-util-visit`](https://github.com/syntax-tree/unist-util-visit) — Like `visit-parents`, but with one parent -* [`unist-util-filter`](https://github.com/eush77/unist-util-filter) +* [`unist-util-filter`](https://github.com/syntax-tree/unist-util-filter) — Create a new tree with all nodes that pass a test * [`unist-util-map`](https://github.com/syntax-tree/unist-util-map) — Create a new tree with all nodes mapped by a given function * [`unist-util-flatmap`](https://gitlab.com/staltz/unist-util-flatmap) - — Create a new tree by mapping (to an array) with the provided function and - then flattening -* [`unist-util-remove`](https://github.com/eush77/unist-util-remove) + — Create a new tree by mapping (to an array) with the given function +* [`unist-util-remove`](https://github.com/syntax-tree/unist-util-remove) — Remove nodes from a tree that pass a test -* [`unist-util-select`](https://github.com/eush77/unist-util-select) +* [`unist-util-select`](https://github.com/syntax-tree/unist-util-select) — Select nodes with CSS-like selectors ## Contribute @@ -141,8 +152,8 @@ See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get started. See [`support.md`][support] for ways to get help. -This project has a [Code of Conduct][coc]. -By interacting with this repository, organisation, or community you agree to +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to abide by its terms. ## License @@ -173,9 +184,9 @@ abide by its terms. [collective]: https://opencollective.com/unified -[chat-badge]: https://img.shields.io/badge/join%20the%20community-on%20spectrum-7b16ff.svg +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg -[chat]: https://spectrum.chat/unified/syntax-tree +[chat]: https://github.com/syntax-tree/unist/discussions [npm]: https://docs.npmjs.com/cli/install @@ -189,15 +200,19 @@ abide by its terms. 
[visitor]: #next--visitornode-ancestors -[contributing]: https://github.com/syntax-tree/.github/blob/master/contributing.md +[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md -[support]: https://github.com/syntax-tree/.github/blob/master/support.md +[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md -[coc]: https://github.com/syntax-tree/.github/blob/master/code-of-conduct.md +[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md [is]: https://github.com/syntax-tree/unist-util-is -[preorder]: https://www.geeksforgeeks.org/tree-traversals-inorder-preorder-and-postorder/ +[depth-first]: https://github.com/syntax-tree/unist#depth-first-traversal + +[tree-traversal]: https://github.com/syntax-tree/unist#tree-traversal + +[preorder]: https://github.com/syntax-tree/unist#preorder [descendant]: https://github.com/syntax-tree/unist#descendant diff --git a/node_modules/unist-util-visit-parents/types/index.d.ts b/node_modules/unist-util-visit-parents/types/index.d.ts new file mode 100644 index 00000000..53a37c4b --- /dev/null +++ b/node_modules/unist-util-visit-parents/types/index.d.ts @@ -0,0 +1,111 @@ +// TypeScript Version: 3.5 + +import {Node, Parent} from 'unist' +import {Test} from 'unist-util-is' + +declare namespace visitParents { + /** + * Continue traversing as normal + */ + type Continue = true + + /** + * Do not traverse this node’s children + */ + type Skip = 'skip' + + /** + * Stop traversing immediately + */ + type Exit = false + + /** + * Union of the action types + */ + type Action = Continue | Skip | Exit + + /** + * List with one or two values, the first an action, the second an index. + */ + type ActionTuple = [Action, Index] + + /** + * Move to the sibling at index next (after node itself is completely traversed). + * Useful if mutating the tree, such as removing the node the visitor is currently on, + * or any of its previous siblings (or next siblings, in case of reverse) + * Results less than 0 or greater than or equal to children.length stop traversing the parent + */ + type Index = number + + /** + * Invoked when a node (matching test, if given) is found. + * Visitors are free to transform node. + * They can also transform the parent of node (the last of ancestors). + * Replacing node itself, if visit.SKIP is not returned, still causes its descendants to be visited. + * If adding or removing previous siblings (or next siblings, in case of reverse) of node, + * visitor should return a new index (number) to specify the sibling to traverse after node is traversed. + * Adding or removing next siblings of node (or previous siblings, in case of reverse) + * is handled as expected without needing to return a new index. + * Removing the children property of an ancestor still results in them being traversed. + * + * @param node Found node + * @param ancestors Ancestors of node + * @paramType V node type found + * @returns + * When Action is passed, treated as a tuple of [Action] + * When Index is passed, treated as a tuple of [CONTINUE, Index] + * When ActionTuple is passed, + * Note that passing a tuple only makes sense if the action is SKIP. + * If the action is EXIT, that action can be returned. + * If the action is CONTINUE, index can be returned. 
+ */ + type Visitor<V extends Node> = ( + node: V, + ancestors: Node[] + ) => void | Action | Index | ActionTuple +} + +declare const visitParents: { + /** + * Visit children of tree which pass a test + * + * @param tree abstract syntax tree to visit + * @param test test node + * @param visitor function to run for each node + * @param reverse visit the tree in reverse, defaults to false + * @typeParam T tree node + * @typeParam V node type found + */ + <V extends Node>( + tree: Node, + test: Test<V> | Array<Test<any>>, + visitor: visitParents.Visitor<V>, + reverse?: boolean + ): void + + /** + * Visit children of a tree + * + * @param tree abstract syntax tree to visit + * @param visitor function to run for each node + * @param reverse visit the tree in reverse, defaults to false + */ + (tree: Node, visitor: visitParents.Visitor<Node>, reverse?: boolean): void + + /** + * Continue traversing as normal + */ + CONTINUE: visitParents.Continue + + /** + * Do not traverse this node’s children + */ + SKIP: visitParents.Skip + + /** + * Stop traversing immediately + */ + EXIT: visitParents.Exit +} + +export = visitParents diff --git a/node_modules/unist-util-visit/index.js b/node_modules/unist-util-visit/index.js deleted file mode 100644 index 39970e7d..00000000 --- a/node_modules/unist-util-visit/index.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -module.exports = visit - -var visitParents = require('unist-util-visit-parents') - -var CONTINUE = visitParents.CONTINUE -var SKIP = visitParents.SKIP -var EXIT = visitParents.EXIT - -visit.CONTINUE = CONTINUE -visit.SKIP = SKIP -visit.EXIT = EXIT - -function visit(tree, test, visitor, reverse) { - if (typeof test === 'function' && typeof visitor !== 'function') { - reverse = visitor - visitor = test - test = null - } - - visitParents(tree, test, overload, reverse) - - function overload(node, parents) { - var parent = parents[parents.length - 1] - var index = parent ? parent.children.indexOf(node) : null - return visitor(node, index, parent) - } -} diff --git a/node_modules/unist-util-visit/package.json b/node_modules/unist-util-visit/package.json deleted file mode 100644 index 44b8bd48..00000000 --- a/node_modules/unist-util-visit/package.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "name": "unist-util-visit", - "version": "1.4.1", - "description": "Recursively walk over unist nodes", - "license": "MIT", - "keywords": [ - "unist", - "remark", - "markdown", - "retext", - "natural", - "language", - "node", - "visit", - "walk", - "util", - "utility" - ], - "repository": "syntax-tree/unist-util-visit", - "bugs": "https://github.com/syntax-tree/unist-util-visit/issues", - "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", - "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", - "Eugene Sharygin <eush77@gmail.com>", - "Richard Gibson <richard.gibson@gmail.com>" - ], - "files": [ - "index.js" - ], - "dependencies": { - "unist-util-visit-parents": "^2.0.0" - }, - "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^14.0.0", - "prettier": "^1.0.0", - "remark": "^10.0.0", - "remark-cli": "^6.0.0", - "remark-preset-wooorm": "^5.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "xo": "^0.24.0" - }, - "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s unistUtilVisit > unist-util-visit.js", - "build-mangle": "browserify . 
-s unistUtilVisit -p tinyify > unist-util-visit.min.js", - "build": "npm run build-bundle && npm run build-mangle", - "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" - }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 - }, - "prettier": { - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "bracketSpacing": false, - "semi": false, - "trailingComma": "none" - }, - "xo": { - "prettier": true, - "esnext": false, - "ignores": [ - "unist-util-visit.js" - ] - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" - ] - } -} diff --git a/node_modules/unist-util-visit/readme.md b/node_modules/unist-util-visit/readme.md deleted file mode 100644 index 25808a27..00000000 --- a/node_modules/unist-util-visit/readme.md +++ /dev/null @@ -1,121 +0,0 @@ -# unist-util-visit - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] - -[**unist**][unist] utility to visit nodes. - -## Install - -[npm][]: - -```bash -npm install unist-util-visit -``` - -## Usage - -```javascript -var u = require('unist-builder') -var visit = require('unist-util-visit') - -var tree = u('tree', [ - u('leaf', '1'), - u('node', [u('leaf', '2')]), - u('void'), - u('leaf', '3') -]) - -visit(tree, 'leaf', function(node) { - console.log(node) -}) -``` - -Yields: - -```js -{ type: 'leaf', value: '1' } -{ type: 'leaf', value: '2' } -{ type: 'leaf', value: '3' } -``` - -## API - -### `visit(tree[, test], visitor[, reverse])` - -This function works exactly the same as [`unist-util-visit-parents`][vp], -but `visitor` has a different signature. - -#### `next? = visitor(node, index, parent)` - -Instead of being passed an array of ancestors, `visitor` is invoked with the -node’s [`index`][index] and its [`parent`][parent]. - -Otherwise the same as [`unist-util-visit-parents`][vp]. - -## Related - -* [`unist-util-visit-parents`][vp] - — Like `visit`, but with a stack of parents -* [`unist-util-filter`](https://github.com/eush77/unist-util-filter) - — Create a new tree with all nodes that pass a test -* [`unist-util-map`](https://github.com/syntax-tree/unist-util-map) - — Create a new tree with all nodes mapped by a given function -* [`unist-util-remove`](https://github.com/eush77/unist-util-remove) - — Remove nodes from a tree that pass a test -* [`unist-util-select`](https://github.com/eush77/unist-util-select) - — Select nodes with CSS-like selectors - -## Contribute - -See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get -started. -See [`support.md`][support] for ways to get help. - -This project has a [Code of Conduct][coc]. -By interacting with this repository, organisation, or community you agree to -abide by its terms. 
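The readme being removed here documents that `unist-util-visit` differs from `visit-parents` only in the visitor signature: `(node, index, parent)` instead of an ancestor stack. A hedged sketch of the common remove-and-continue idiom under the old `unist-util-visit@1` API deleted by this patch (the numeric return value follows the visit-parents index rules quoted earlier):

```js
var u = require('unist-builder')
var visit = require('unist-util-visit')

var tree = u('tree', [u('leaf', '1'), u('bad', 'x'), u('leaf', '2')])

visit(tree, 'bad', function (node, index, parent) {
  // Remove the node currently being visited, then resume from the same
  // index so the sibling that shifted into this slot is still visited.
  parent.children.splice(index, 1)
  return index
})
```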
- -## License - -[MIT][license] © [Titus Wormer][author] - -<!-- Definition --> - -[build-badge]: https://img.shields.io/travis/syntax-tree/unist-util-visit.svg - -[build]: https://travis-ci.org/syntax-tree/unist-util-visit - -[coverage-badge]: https://img.shields.io/codecov/c/github/syntax-tree/unist-util-visit.svg - -[coverage]: https://codecov.io/github/syntax-tree/unist-util-visit - -[downloads-badge]: https://img.shields.io/npm/dm/unist-util-visit.svg - -[downloads]: https://www.npmjs.com/package/unist-util-visit - -[size-badge]: https://img.shields.io/bundlephobia/minzip/unist-util-visit.svg - -[size]: https://bundlephobia.com/result?p=unist-util-visit - -[npm]: https://docs.npmjs.com/cli/install - -[license]: license - -[author]: https://wooorm.com - -[contributing]: https://github.com/syntax-tree/.github/blob/master/contributing.md - -[support]: https://github.com/syntax-tree/.github/blob/master/support.md - -[coc]: https://github.com/syntax-tree/.github/blob/master/code-of-conduct.md - -[unist]: https://github.com/syntax-tree/unist - -[vp]: https://github.com/syntax-tree/unist-util-visit-parents - -[index]: https://github.com/syntax-tree/unist#index - -[parent]: https://github.com/syntax-tree/unist#parent-1 diff --git a/node_modules/vfile-location/index.js b/node_modules/vfile-location/index.js deleted file mode 100644 index 2d7c21c1..00000000 --- a/node_modules/vfile-location/index.js +++ /dev/null @@ -1,74 +0,0 @@ -'use strict' - -module.exports = factory - -function factory(file) { - var contents = indices(String(file)) - - return { - toPosition: offsetToPositionFactory(contents), - toOffset: positionToOffsetFactory(contents) - } -} - -// Factory to get the line and column-based `position` for `offset` in the bound -// indices. -function offsetToPositionFactory(indices) { - return offsetToPosition - - // Get the line and column-based `position` for `offset` in the bound indices. - function offsetToPosition(offset) { - var index = -1 - var length = indices.length - - if (offset < 0) { - return {} - } - - while (++index < length) { - if (indices[index] > offset) { - return { - line: index + 1, - column: offset - (indices[index - 1] || 0) + 1, - offset: offset - } - } - } - - return {} - } -} - -// Factory to get the `offset` for a line and column-based `position` in the -// bound indices. -function positionToOffsetFactory(indices) { - return positionToOffset - - // Get the `offset` for a line and column-based `position` in the bound - // indices. - function positionToOffset(position) { - var line = position && position.line - var column = position && position.column - - if (!isNaN(line) && !isNaN(column) && line - 1 in indices) { - return (indices[line - 2] || 0) + column - 1 || 0 - } - - return -1 - } -} - -// Get indices of line-breaks in `value`. 
-function indices(value) { - var result = [] - var index = value.indexOf('\n') - - while (index !== -1) { - result.push(index + 1) - index = value.indexOf('\n', index + 1) - } - - result.push(value.length + 1) - - return result -} diff --git a/node_modules/vfile-location/license b/node_modules/vfile-location/license deleted file mode 100644 index 8d8660d3..00000000 --- a/node_modules/vfile-location/license +++ /dev/null @@ -1,22 +0,0 @@ -(The MIT License) - -Copyright (c) 2016 Titus Wormer <tituswormer@gmail.com> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/vfile-location/package.json b/node_modules/vfile-location/package.json deleted file mode 100644 index f66857cf..00000000 --- a/node_modules/vfile-location/package.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "name": "vfile-location", - "version": "2.0.6", - "description": "Convert between positions (line and column-based) and offsets (range-based) locations in a virtual file", - "license": "MIT", - "keywords": [ - "remark", - "comment", - "message", - "marker", - "control" - ], - "repository": "vfile/vfile-location", - "bugs": "https://github.com/vfile/vfile-location/issues", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", - "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" - ], - "files": [ - "index.js" - ], - "dependencies": {}, - "devDependencies": { - "browserify": "^16.0.0", - "nyc": "^14.0.0", - "prettier": "^1.0.0", - "remark-cli": "^7.0.0", - "remark-preset-wooorm": "^6.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "vfile": "^4.0.0", - "xo": "^0.25.0" - }, - "scripts": { - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s vfileLocation > vfile-location.js", - "build-mangle": "browserify . 
-s vfileLocation -p tinyify > vfile-location.min.js", - "build": "npm run build-bundle && npm run build-mangle", - "test-api": "node test", - "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" - }, - "nyc": { - "check-coverage": true, - "lines": 100, - "functions": 100, - "branches": 100 - }, - "prettier": { - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "bracketSpacing": false, - "semi": false, - "trailingComma": "none" - }, - "xo": { - "prettier": true, - "esnext": false, - "ignores": [ - "vfile-location.js" - ] - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" - ] - } -} diff --git a/node_modules/vfile-location/readme.md b/node_modules/vfile-location/readme.md deleted file mode 100644 index aa126a3f..00000000 --- a/node_modules/vfile-location/readme.md +++ /dev/null @@ -1,115 +0,0 @@ -# vfile-location - -[![Build][build-badge]][build] -[![Coverage][coverage-badge]][coverage] -[![Downloads][downloads-badge]][downloads] -[![Size][size-badge]][size] -[![Sponsors][sponsors-badge]][collective] -[![Backers][backers-badge]][collective] -[![Chat][chat-badge]][chat] - -Convert between positions (line and column-based) and offsets (range-based) -locations in a [virtual file][vfile]. - -## Install - -[npm][]: - -```sh -npm install vfile-location -``` - -## Usage - -```js -var vfile = require('vfile') -var vfileLocation = require('vfile-location') - -var location = vfileLocation(vfile('foo\nbar\nbaz')) - -var offset = location.toOffset({line: 3, column: 3}) // => 10 -location.toPosition(offset) // => {line: 3, column: 3, offset: 10} -``` - -## API - -### `location = vfileLocation(doc)` - -Get transform functions for the given `doc` (`string`) or [`file`][vfile]. - -Returns an object with [`toOffset`][to-offset] and [`toPosition`][to-position]. - -### `location.toOffset(position)` - -Get the `offset` (`number`) for a line and column-based [`position`][position] -in the bound file. -Returns `-1` when given invalid or out of bounds input. - -### `location.toPosition(offset)` - -Get the line and column-based [`position`][position] for `offset` in the bound -file. - -## Contribute - -See [`contributing.md`][contributing] in [`vfile/.github`][health] for ways to -get started. -See [`support.md`][support] for ways to get help. - -This project has a [Code of Conduct][coc]. -By interacting with this repository, organisation, or community you agree to -abide by its terms. 
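A short sketch of the behaviour that readme describes, including the out-of-range results it promises, assuming the `vfile-location@2` API this patch removes from `node_modules`:

```js
var vfile = require('vfile')
var vfileLocation = require('vfile-location')

var location = vfileLocation(vfile('foo\nbar\nbaz'))

location.toOffset({line: 2, column: 1}) // => 4
location.toPosition(4) // => {line: 2, column: 1, offset: 4}

// Out-of-bounds input degrades as documented above.
location.toOffset({line: 9, column: 1}) // => -1
location.toPosition(-1) // => {}
```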
- -## License - -[MIT][license] © [Titus Wormer][author] - -<!-- Definitions --> - -[build-badge]: https://img.shields.io/travis/vfile/vfile-location.svg - -[build]: https://travis-ci.org/vfile/vfile-location - -[coverage-badge]: https://img.shields.io/codecov/c/github/vfile/vfile-location.svg - -[coverage]: https://codecov.io/github/vfile/vfile-location - -[downloads-badge]: https://img.shields.io/npm/dm/vfile-location.svg - -[downloads]: https://www.npmjs.com/package/vfile-location - -[size-badge]: https://img.shields.io/bundlephobia/minzip/vfile-location.svg - -[size]: https://bundlephobia.com/result?p=vfile-location - -[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg - -[backers-badge]: https://opencollective.com/unified/backers/badge.svg - -[collective]: https://opencollective.com/unified - -[chat-badge]: https://img.shields.io/badge/join%20the%20community-on%20spectrum-7b16ff.svg - -[chat]: https://spectrum.chat/unified/vfile - -[npm]: https://docs.npmjs.com/cli/install - -[contributing]: https://github.com/vfile/.github/blob/master/contributing.md - -[support]: https://github.com/vfile/.github/blob/master/support.md - -[health]: https://github.com/vfile/.github - -[coc]: https://github.com/vfile/.github/blob/master/code-of-conduct.md - -[license]: license - -[author]: https://wooorm.com - -[vfile]: https://github.com/vfile/vfile - -[to-offset]: #locationtooffsetposition - -[to-position]: #locationtopositionoffset - -[position]: https://github.com/syntax-tree/unist#position diff --git a/node_modules/vfile-message/package.json b/node_modules/vfile-message/package.json index 7396bb68..a26c628d 100644 --- a/node_modules/vfile-message/package.json +++ b/node_modules/vfile-message/package.json @@ -1,43 +1,56 @@ { "name": "vfile-message", - "version": "1.1.1", - "description": "Create a virtual message", + "version": "2.0.4", + "description": "vfile utility to create a virtual message", "license": "MIT", "keywords": [ "vfile", + "vfile-util", + "util", + "utility", "virtual", + "file", "message" ], "repository": "vfile/vfile-message", "bugs": "https://github.com/vfile/vfile-message/issues", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)" ], + "types": "types/index.d.ts", "files": [ + "types/index.d.ts", "index.js" ], "dependencies": { - "unist-util-stringify-position": "^1.1.1" + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^2.0.0" }, "devDependencies": { "browserify": "^16.0.0", - "nyc": "^13.0.0", - "prettier": "^1.12.1", - "remark-cli": "^6.0.0", - "remark-preset-wooorm": "^4.0.0", + "dtslint": "^3.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^7.0.0", + "remark-preset-wooorm": "^6.0.0", "tape": "^4.0.0", - "tinyify": "^2.4.3", - "xo": "^0.23.0" + "tinyify": "^2.0.0", + "xo": "^0.28.0" }, "scripts": { - "format": "remark . -qfo && prettier --write '**/*.js' && xo --fix", + "format": "remark . -qfo && prettier --write \"**/*.{js,ts}\" && xo --fix", "build-bundle": "browserify . -s vfileMessage > vfile-message.js", "build-mangle": "browserify . 
-s vfileMessage -p tinyify > vfile-message.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run format && npm run build && npm run test-coverage" + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "nyc": { "check-coverage": true, @@ -57,11 +70,10 @@ "prettier": true, "esnext": false, "rules": { - "no-var": "off", - "prefer-arrow-callback": "off", - "object-shorthand": "off" + "@typescript-eslint/member-ordering": "off" }, "ignores": [ + "types", "vfile-message.js" ] }, diff --git a/node_modules/vfile-message/readme.md b/node_modules/vfile-message/readme.md index 0c88353a..894504a2 100644 --- a/node_modules/vfile-message/readme.md +++ b/node_modules/vfile-message/readme.md @@ -3,11 +3,14 @@ [![Build][build-badge]][build] [![Coverage][coverage-badge]][coverage] [![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] [![Chat][chat-badge]][chat] Create [vfile][] messages. -## Installation +## Install [npm][]: @@ -15,7 +18,7 @@ Create [vfile][] messages. npm install vfile-message ``` -## Usage +## Use ```js var VMessage = require('vfile-message') @@ -48,15 +51,15 @@ Yields: ### `VMessage(reason[, position][, origin])` -Constructor of a message for `reason` at `position` from `origin`. When -an error is passed in as `reason`, copies the stack. +Constructor of a message for `reason` at `position` from `origin`. +When an error is passed in as `reason`, copies the stack. ##### Parameters ###### `reason` -Reason for message (`string` or `Error`). Uses the stack and message of the -error if given. +Reason for message (`string` or `Error`). +Uses the stack and message of the error if given. ###### `position` @@ -87,9 +90,9 @@ Reason for message (`string`). ###### `fatal` -If `true`, marks associated file as no longer processable (`boolean?`). If -`false`, necessitates a (potential) change. The value can also be `null` or -`undefined`. +If `true`, marks associated file as no longer processable (`boolean?`). +If `false`, necessitates a (potential) change. +The value can also be `null` or `undefined`. ###### `line` @@ -101,9 +104,9 @@ Starting column of error (`number?`). ###### `location` -Full range information, when available ([`Position`][position]). Has `start` -and `end` properties, both set to an object with `line` and `column`, set to -`number?`. +Full range information, when available ([`Position`][position]). +Has `start` and `end` properties, both set to an object with `line` and +`column`, set to `number?`. ###### `source` @@ -138,10 +141,13 @@ You may add a `url` property with a link to documentation for the message. ## Contribute -See [`contributing.md` in `vfile/vfile`][contributing] for ways to get started. +See [`contributing.md`][contributing] in [`vfile/.github`][health] for ways to +get started. +See [`support.md`][support] for ways to get help. -This organisation has a [Code of Conduct][coc]. By interacting with this -repository, organisation, or community you agree to abide by its terms. +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. ## License @@ -161,12 +167,30 @@ repository, organisation, or community you agree to abide by its terms. 
[downloads]: https://www.npmjs.com/package/vfile-message -[chat-badge]: https://img.shields.io/badge/join%20the%20community-on%20spectrum-7b16ff.svg +[size-badge]: https://img.shields.io/bundlephobia/minzip/vfile-message.svg + +[size]: https://bundlephobia.com/result?p=vfile-message + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-spectrum-7b16ff.svg [chat]: https://spectrum.chat/unified/vfile [npm]: https://docs.npmjs.com/cli/install +[contributing]: https://github.com/vfile/.github/blob/master/contributing.md + +[support]: https://github.com/vfile/.github/blob/master/support.md + +[health]: https://github.com/vfile/.github + +[coc]: https://github.com/vfile/.github/blob/master/code-of-conduct.md + [license]: license [author]: https://wooorm.com @@ -181,10 +205,6 @@ repository, organisation, or community you agree to abide by its terms. [vfile]: https://github.com/vfile/vfile -[contributing]: https://github.com/vfile/vfile/blob/master/contributing.md - -[coc]: https://github.com/vfile/vfile/blob/master/code-of-conduct.md - [util]: https://github.com/vfile/vfile#utilities [reporter]: https://github.com/vfile/vfile-reporter diff --git a/node_modules/vfile-message/types/index.d.ts b/node_modules/vfile-message/types/index.d.ts new file mode 100644 index 00000000..d2178431 --- /dev/null +++ b/node_modules/vfile-message/types/index.d.ts @@ -0,0 +1,98 @@ +// TypeScript Version: 3.0 + +import * as Unist from 'unist' + +declare namespace vfileMessage { + /** + * Create a virtual message. + */ + interface VFileMessage extends Error { + /** + * Constructor of a message for `reason` at `position` from `origin`. + * When an error is passed in as `reason`, copies the `stack`. + * + * @param reason Reason for message (`string` or `Error`). Uses the stack and message of the error if given. + * @param position Place at which the message occurred in a file (`Node`, `Position`, or `Point`, optional). + * @param origin Place in code the message originates from (`string`, optional). + */ + ( + reason: string | Error, + position?: Unist.Node | Unist.Position | Unist.Point, + origin?: string + ): VFileMessage + + /** + * Constructor of a message for `reason` at `position` from `origin`. + * When an error is passed in as `reason`, copies the `stack`. + * + * @param reason Reason for message (`string` or `Error`). Uses the stack and message of the error if given. + * @param position Place at which the message occurred in a file (`Node`, `Position`, or `Point`, optional). + * @param origin Place in code the message originates from (`string`, optional). + */ + new ( + reason: string | Error, + position?: Unist.Node | Unist.Position | Unist.Point, + origin?: string + ): VFileMessage + + /** + * Category of message. + */ + ruleId: string | null + + /** + * Reason for message. + */ + reason: string + + /** + * Starting line of error. + */ + line: number | null + + /** + * Starting column of error. + */ + column: number | null + + /** + * Full range information, when available. + * Has start and end properties, both set to an object with line and column, set to number?. + */ + location: Unist.Position + + /** + * Namespace of warning. + */ + source: string | null + + /** + * If true, marks associated file as no longer processable. 
+ */ + fatal?: boolean | null + + /** + * You may add a file property with a path of a file (used throughout the VFile ecosystem). + */ + file?: string + + /** + * You may add a note property with a long form description of the message (supported by vfile-reporter). + */ + note?: string + + /** + * You may add a url property with a link to documentation for the message. + */ + url?: string + + /** + * It’s OK to store custom data directly on the VMessage, some of those are handled by utilities. + */ + [key: string]: unknown + } +} + +declare const vfileMessage: vfileMessage.VFileMessage + +export = vfileMessage diff --git a/node_modules/vfile/changelog.md b/node_modules/vfile/changelog.md new file mode 100644 index 00000000..66bdd1a4 --- /dev/null +++ b/node_modules/vfile/changelog.md @@ -0,0 +1,5 @@ +# Changelog + +See [GitHub Releases][releases] for the changelog. + +[releases]: https://github.com/vfile/vfile/releases diff --git a/node_modules/vfile/core.js b/node_modules/vfile/core.js index 2d88a333..ea73f466 100644 --- a/node_modules/vfile/core.js +++ b/node_modules/vfile/core.js @@ -1,169 +1,3 @@ -'use strict'; +'use strict' -var path = require('path'); -var replace = require('replace-ext'); -var buffer = require('is-buffer'); - -module.exports = VFile; - -var own = {}.hasOwnProperty; -var proto = VFile.prototype; - -proto.toString = toString; - -/* Order of setting (least specific to most), we need this because - * otherwise `{stem: 'a', path: '~/b.js'}` would throw, as a path - * is needed before a stem can be set. */ -var order = [ - 'history', - 'path', - 'basename', - 'stem', - 'extname', - 'dirname' -]; - -/* Construct a new file. */ -function VFile(options) { - var prop; - var index; - var length; - - if (!options) { - options = {}; - } else if (typeof options === 'string' || buffer(options)) { - options = {contents: options}; - } else if ('message' in options && 'messages' in options) { - return options; - } - - if (!(this instanceof VFile)) { - return new VFile(options); - } - - this.data = {}; - this.messages = []; - this.history = []; - this.cwd = process.cwd(); - - /* Set path related properties in the correct order. */ - index = -1; - length = order.length; - - while (++index < length) { - prop = order[index]; - - if (own.call(options, prop)) { - this[prop] = options[prop]; - } - } - - /* Set non-path related properties. */ - for (prop in options) { - if (order.indexOf(prop) === -1) { - this[prop] = options[prop]; - } - } -} - -/* Access full path (`~/index.min.js`). */ -Object.defineProperty(proto, 'path', { - get: function () { - return this.history[this.history.length - 1]; - }, - set: function (path) { - assertNonEmpty(path, 'path'); - - if (path !== this.path) { - this.history.push(path); - } - } -}); - -/* Access parent path (`~`). */ -Object.defineProperty(proto, 'dirname', { - get: function () { - return typeof this.path === 'string' ? path.dirname(this.path) : undefined; - }, - set: function (dirname) { - assertPath(this.path, 'dirname'); - this.path = path.join(dirname || '', this.basename); - } -}); - -/* Access basename (`index.min.js`). */ -Object.defineProperty(proto, 'basename', { - get: function () { - return typeof this.path === 'string' ? path.basename(this.path) : undefined; - }, - set: function (basename) { - assertNonEmpty(basename, 'basename'); - assertPart(basename, 'basename'); - this.path = path.join(this.dirname || '', basename); - } -}); - -/* Access extname (`.js`). 
*/ -Object.defineProperty(proto, 'extname', { - get: function () { - return typeof this.path === 'string' ? path.extname(this.path) : undefined; - }, - set: function (extname) { - var ext = extname || ''; - - assertPart(ext, 'extname'); - assertPath(this.path, 'extname'); - - if (ext) { - if (ext.charAt(0) !== '.') { - throw new Error('`extname` must start with `.`'); - } - - if (ext.indexOf('.', 1) !== -1) { - throw new Error('`extname` cannot contain multiple dots'); - } - } - - this.path = replace(this.path, ext); - } -}); - -/* Access stem (`index.min`). */ -Object.defineProperty(proto, 'stem', { - get: function () { - return typeof this.path === 'string' ? path.basename(this.path, this.extname) : undefined; - }, - set: function (stem) { - assertNonEmpty(stem, 'stem'); - assertPart(stem, 'stem'); - this.path = path.join(this.dirname || '', stem + (this.extname || '')); - } -}); - -/* Get the value of the file. */ -function toString(encoding) { - var value = this.contents || ''; - return buffer(value) ? value.toString(encoding) : String(value); -} - -/* Assert that `part` is not a path (i.e., does - * not contain `path.sep`). */ -function assertPart(part, name) { - if (part.indexOf(path.sep) !== -1) { - throw new Error('`' + name + '` cannot be a path: did not expect `' + path.sep + '`'); - } -} - -/* Assert that `part` is not empty. */ -function assertNonEmpty(part, name) { - if (!part) { - throw new Error('`' + name + '` cannot be empty'); - } -} - -/* Assert `path` exists. */ -function assertPath(path, name) { - if (!path) { - throw new Error('Setting `' + name + '` requires `path` to be set too'); - } -} +module.exports = require('./lib/core') diff --git a/node_modules/vfile/index.js b/node_modules/vfile/index.js index 9b3c7e0d..3541ac57 100644 --- a/node_modules/vfile/index.js +++ b/node_modules/vfile/index.js @@ -1,53 +1,3 @@ -'use strict'; +'use strict' -var VMessage = require('vfile-message'); -var VFile = require('./core.js'); - -module.exports = VFile; - -var proto = VFile.prototype; - -proto.message = message; -proto.info = info; -proto.fail = fail; - -/* Slight backwards compatibility. Remove in the future. */ -proto.warn = message; - -/* Create a message with `reason` at `position`. - * When an error is passed in as `reason`, copies the stack. */ -function message(reason, position, origin) { - var filePath = this.path; - var message = new VMessage(reason, position, origin); - - if (filePath) { - message.name = filePath + ':' + message.name; - message.file = filePath; - } - - message.fatal = false; - - this.messages.push(message); - - return message; -} - -/* Fail. Creates a vmessage, associates it with the file, - * and throws it. */ -function fail() { - var message = this.message.apply(this, arguments); - - message.fatal = true; - - throw message; -} - -/* Info. Creates a vmessage, associates it with the file, - * and marks the fatality as null. 
*/ -function info() { - var message = this.message.apply(this, arguments); - - message.fatal = null; - - return message; -} +module.exports = require('./lib') diff --git a/node_modules/vfile/lib/core.js b/node_modules/vfile/lib/core.js new file mode 100644 index 00000000..891bb1aa --- /dev/null +++ b/node_modules/vfile/lib/core.js @@ -0,0 +1,173 @@ +'use strict' + +var p = require('./minpath') +var proc = require('./minproc') +var buffer = require('is-buffer') + +module.exports = VFile + +var own = {}.hasOwnProperty + +// Order of setting (least specific to most), we need this because otherwise +// `{stem: 'a', path: '~/b.js'}` would throw, as a path is needed before a +// stem can be set. +var order = ['history', 'path', 'basename', 'stem', 'extname', 'dirname'] + +VFile.prototype.toString = toString + +// Access full path (`~/index.min.js`). +Object.defineProperty(VFile.prototype, 'path', {get: getPath, set: setPath}) + +// Access parent path (`~`). +Object.defineProperty(VFile.prototype, 'dirname', { + get: getDirname, + set: setDirname +}) + +// Access basename (`index.min.js`). +Object.defineProperty(VFile.prototype, 'basename', { + get: getBasename, + set: setBasename +}) + +// Access extname (`.js`). +Object.defineProperty(VFile.prototype, 'extname', { + get: getExtname, + set: setExtname +}) + +// Access stem (`index.min`). +Object.defineProperty(VFile.prototype, 'stem', {get: getStem, set: setStem}) + +// Construct a new file. +function VFile(options) { + var prop + var index + + if (!options) { + options = {} + } else if (typeof options === 'string' || buffer(options)) { + options = {contents: options} + } else if ('message' in options && 'messages' in options) { + return options + } + + if (!(this instanceof VFile)) { + return new VFile(options) + } + + this.data = {} + this.messages = [] + this.history = [] + this.cwd = proc.cwd() + + // Set path related properties in the correct order. + index = -1 + + while (++index < order.length) { + prop = order[index] + + if (own.call(options, prop)) { + this[prop] = options[prop] + } + } + + // Set non-path related properties. + for (prop in options) { + if (order.indexOf(prop) < 0) { + this[prop] = options[prop] + } + } +} + +function getPath() { + return this.history[this.history.length - 1] +} + +function setPath(path) { + assertNonEmpty(path, 'path') + + if (this.path !== path) { + this.history.push(path) + } +} + +function getDirname() { + return typeof this.path === 'string' ? p.dirname(this.path) : undefined +} + +function setDirname(dirname) { + assertPath(this.path, 'dirname') + this.path = p.join(dirname || '', this.basename) +} + +function getBasename() { + return typeof this.path === 'string' ? p.basename(this.path) : undefined +} + +function setBasename(basename) { + assertNonEmpty(basename, 'basename') + assertPart(basename, 'basename') + this.path = p.join(this.dirname || '', basename) +} + +function getExtname() { + return typeof this.path === 'string' ? p.extname(this.path) : undefined +} + +function setExtname(extname) { + assertPart(extname, 'extname') + assertPath(this.path, 'extname') + + if (extname) { + if (extname.charCodeAt(0) !== 46 /* `.` */) { + throw new Error('`extname` must start with `.`') + } + + if (extname.indexOf('.', 1) > -1) { + throw new Error('`extname` cannot contain multiple dots') + } + } + + this.path = p.join(this.dirname, this.stem + (extname || '')) +} + +function getStem() { + return typeof this.path === 'string' + ? 
p.basename(this.path, this.extname) + : undefined +} + +function setStem(stem) { + assertNonEmpty(stem, 'stem') + assertPart(stem, 'stem') + this.path = p.join(this.dirname || '', stem + (this.extname || '')) +} + +// Get the value of the file. +function toString(encoding) { + return (this.contents || '').toString(encoding) +} + +// Assert that `part` is not a path (i.e., does not contain `p.sep`). +function assertPart(part, name) { + if (part && part.indexOf(p.sep) > -1) { + throw new Error( + '`' + name + '` cannot be a path: did not expect `' + p.sep + '`' + ) + } +} + +// Assert that `part` is not empty. +function assertNonEmpty(part, name) { + if (!part) { + throw new Error('`' + name + '` cannot be empty') + } +} + +// Assert `path` exists. +function assertPath(path, name) { + if (!path) { + throw new Error('Setting `' + name + '` requires `path` to be set too') + } +} diff --git a/node_modules/vfile/lib/index.js b/node_modules/vfile/lib/index.js new file mode 100644 index 00000000..b90c0bd3 --- /dev/null +++ b/node_modules/vfile/lib/index.js @@ -0,0 +1,46 @@ +'use strict' + +var VMessage = require('vfile-message') +var VFile = require('./core.js') + +module.exports = VFile + +VFile.prototype.message = message +VFile.prototype.info = info +VFile.prototype.fail = fail + +// Create a message with `reason` at `position`. +// When an error is passed in as `reason`, copies the stack. +function message(reason, position, origin) { + var message = new VMessage(reason, position, origin) + + if (this.path) { + message.name = this.path + ':' + message.name + message.file = this.path + } + + message.fatal = false + + this.messages.push(message) + + return message +} + +// Fail: creates a vmessage, associates it with the file, and throws it. +function fail() { + var message = this.message.apply(this, arguments) + + message.fatal = true + + throw message +} + +// Info: creates a vmessage, associates it with the file, and marks the fatality +// as null. +function info() { + var message = this.message.apply(this, arguments) + + message.fatal = null + + return message +} diff --git a/node_modules/vfile/lib/minpath.browser.js b/node_modules/vfile/lib/minpath.browser.js new file mode 100644 index 00000000..7b8a5944 --- /dev/null +++ b/node_modules/vfile/lib/minpath.browser.js @@ -0,0 +1,374 @@ +'use strict' + +// A derivative work based on: +// <https://github.com/browserify/path-browserify>. +// Which is licensed: +// +// MIT License +// +// Copyright (c) 2013 James Halliday +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +// the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +// A derivative work based on: +// +// Parts of that are extracted from Node’s internal `path` module: +// <https://github.com/nodejs/node/blob/master/lib/path.js>. +// Which is licensed: +// +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +exports.basename = basename +exports.dirname = dirname +exports.extname = extname +exports.join = join +exports.sep = '/' + +function basename(path, ext) { + var start = 0 + var end = -1 + var index + var firstNonSlashEnd + var seenNonSlash + var extIndex + + if (ext !== undefined && typeof ext !== 'string') { + throw new TypeError('"ext" argument must be a string') + } + + assertPath(path) + index = path.length + + if (ext === undefined || !ext.length || ext.length > path.length) { + while (index--) { + if (path.charCodeAt(index) === 47 /* `/` */) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now. + if (seenNonSlash) { + start = index + 1 + break + } + } else if (end < 0) { + // We saw the first non-path separator, mark this as the end of our + // path component. + seenNonSlash = true + end = index + 1 + } + } + + return end < 0 ? '' : path.slice(start, end) + } + + if (ext === path) { + return '' + } + + firstNonSlashEnd = -1 + extIndex = ext.length - 1 + + while (index--) { + if (path.charCodeAt(index) === 47 /* `/` */) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now. + if (seenNonSlash) { + start = index + 1 + break + } + } else { + if (firstNonSlashEnd < 0) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching. + seenNonSlash = true + firstNonSlashEnd = index + 1 + } + + if (extIndex > -1) { + // Try to match the explicit extension. 
+ if (path.charCodeAt(index) === ext.charCodeAt(extIndex--)) { + if (extIndex < 0) { + // We matched the extension, so mark this as the end of our path + // component + end = index + } + } else { + // Extension does not match, so our result is the entire path + // component + extIndex = -1 + end = firstNonSlashEnd + } + } + } + } + + if (start === end) { + end = firstNonSlashEnd + } else if (end < 0) { + end = path.length + } + + return path.slice(start, end) +} + +function dirname(path) { + var end + var unmatchedSlash + var index + + assertPath(path) + + if (!path.length) { + return '.' + } + + end = -1 + index = path.length + + // Prefix `--` is important to not run on `0`. + while (--index) { + if (path.charCodeAt(index) === 47 /* `/` */) { + if (unmatchedSlash) { + end = index + break + } + } else if (!unmatchedSlash) { + // We saw the first non-path separator + unmatchedSlash = true + } + } + + return end < 0 + ? path.charCodeAt(0) === 47 /* `/` */ + ? '/' + : '.' + : end === 1 && path.charCodeAt(0) === 47 /* `/` */ + ? '//' + : path.slice(0, end) +} + +function extname(path) { + var startDot = -1 + var startPart = 0 + var end = -1 + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find. + var preDotState = 0 + var unmatchedSlash + var code + var index + + assertPath(path) + + index = path.length + + while (index--) { + code = path.charCodeAt(index) + + if (code === 47 /* `/` */) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now. + if (unmatchedSlash) { + startPart = index + 1 + break + } + + continue + } + + if (end < 0) { + // We saw the first non-path separator, mark this as the end of our + // extension. + unmatchedSlash = true + end = index + 1 + } + + if (code === 46 /* `.` */) { + // If this is our first dot, mark it as the start of our extension. + if (startDot < 0) { + startDot = index + } else if (preDotState !== 1) { + preDotState = 1 + } + } else if (startDot > -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension. + preDotState = -1 + } + } + + if ( + startDot < 0 || + end < 0 || + // We saw a non-dot character immediately before the dot. + preDotState === 0 || + // The (right-most) trimmed path component is exactly `..`. + (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) + ) { + return '' + } + + return path.slice(startDot, end) +} + +function join() { + var index = -1 + var joined + + while (++index < arguments.length) { + assertPath(arguments[index]) + + if (arguments[index]) { + joined = + joined === undefined + ? arguments[index] + : joined + '/' + arguments[index] + } + } + + return joined === undefined ? '.' : normalize(joined) +} + +// Note: `normalize` is not exposed as `path.normalize`, so some code is +// manually removed from it. +function normalize(path) { + var absolute + var value + + assertPath(path) + + absolute = path.charCodeAt(0) === 47 /* `/` */ + + // Normalize the path according to POSIX rules. + value = normalizeString(path, !absolute) + + if (!value.length && !absolute) { + value = '.' + } + + if (value.length && path.charCodeAt(path.length - 1) === 47 /* / */) { + value += '/' + } + + return absolute ? '/' + value : value +} + +// Resolve `.` and `..` elements in a path with directory names. 
+function normalizeString(path, allowAboveRoot) { + var result = '' + var lastSegmentLength = 0 + var lastSlash = -1 + var dots = 0 + var index = -1 + var code + var lastSlashIndex + + while (++index <= path.length) { + if (index < path.length) { + code = path.charCodeAt(index) + } else if (code === 47 /* `/` */) { + break + } else { + code = 47 /* `/` */ + } + + if (code === 47 /* `/` */) { + if (lastSlash === index - 1 || dots === 1) { + // Empty. + } else if (lastSlash !== index - 1 && dots === 2) { + if ( + result.length < 2 || + lastSegmentLength !== 2 || + result.charCodeAt(result.length - 1) !== 46 /* `.` */ || + result.charCodeAt(result.length - 2) !== 46 /* `.` */ + ) { + if (result.length > 2) { + lastSlashIndex = result.lastIndexOf('/') + + /* istanbul ignore else - No clue how to cover it. */ + if (lastSlashIndex !== result.length - 1) { + if (lastSlashIndex < 0) { + result = '' + lastSegmentLength = 0 + } else { + result = result.slice(0, lastSlashIndex) + lastSegmentLength = result.length - 1 - result.lastIndexOf('/') + } + + lastSlash = index + dots = 0 + continue + } + } else if (result.length) { + result = '' + lastSegmentLength = 0 + lastSlash = index + dots = 0 + continue + } + } + + if (allowAboveRoot) { + result = result.length ? result + '/..' : '..' + lastSegmentLength = 2 + } + } else { + if (result.length) { + result += '/' + path.slice(lastSlash + 1, index) + } else { + result = path.slice(lastSlash + 1, index) + } + + lastSegmentLength = index - lastSlash - 1 + } + + lastSlash = index + dots = 0 + } else if (code === 46 /* `.` */ && dots > -1) { + dots++ + } else { + dots = -1 + } + } + + return result +} + +function assertPath(path) { + if (typeof path !== 'string') { + throw new TypeError( + 'Path must be a string. Received ' + JSON.stringify(path) + ) + } +} diff --git a/node_modules/vfile/lib/minpath.js b/node_modules/vfile/lib/minpath.js new file mode 100644 index 00000000..f50e1e54 --- /dev/null +++ b/node_modules/vfile/lib/minpath.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('path') diff --git a/node_modules/vfile/lib/minproc.browser.js b/node_modules/vfile/lib/minproc.browser.js new file mode 100644 index 00000000..2cb0418a --- /dev/null +++ b/node_modules/vfile/lib/minproc.browser.js @@ -0,0 +1,10 @@ +'use strict' + +// Somewhat based on: +// <https://github.com/defunctzombie/node-process/blob/master/browser.js>. +// But I don’t think one tiny line of code can be copyrighted. 
😅 +exports.cwd = cwd + +function cwd() { + return '/' +} diff --git a/node_modules/vfile/lib/minproc.js b/node_modules/vfile/lib/minproc.js new file mode 100644 index 00000000..8347adcd --- /dev/null +++ b/node_modules/vfile/lib/minproc.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = process diff --git a/node_modules/vfile/package.json b/node_modules/vfile/package.json index aba5f34d..500e8328 100644 --- a/node_modules/vfile/package.json +++ b/node_modules/vfile/package.json @@ -1,9 +1,10 @@ { "name": "vfile", - "version": "2.3.0", + "version": "4.2.1", "description": "Virtual file format for text processing", "license": "MIT", "keywords": [ + "vfile", "virtual", "file", "text", @@ -12,47 +13,65 @@ "warning", "error", "remark", - "retext" + "retext", + "rehype" ], "repository": "vfile/vfile", "bugs": "https://github.com/vfile/vfile/issues", - "author": "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "contributors": [ - "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)", + "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)", "Brendan Abbott <brendan.abbott@temando.com>", "Denys Dovhan <email@denysdovhan.com>", "Kyle Mathews <mathews.kyle@gmail.com>", "Shinnosuke Watanabe <snnskwtnb@gmail.com>", "Sindre Sorhus <sindresorhus@gmail.com>" ], + "types": "types/index.d.ts", + "browser": { + "./lib/minpath.js": "./lib/minpath.browser.js", + "./lib/minproc.js": "./lib/minproc.browser.js" + }, + "react-native": { + "./lib/minpath.js": "./lib/minpath.browser.js", + "./lib/minproc.js": "./lib/minproc.browser.js" + }, "files": [ - "core.js", - "index.js" + "lib/", + "types/index.d.ts", + "index.js", + "core.js" ], "dependencies": { - "is-buffer": "^1.1.4", - "replace-ext": "1.0.0", - "unist-util-stringify-position": "^1.0.0", - "vfile-message": "^1.0.0" + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "unist-util-stringify-position": "^2.0.0", + "vfile-message": "^2.0.0" }, "devDependencies": { - "browserify": "^14.0.0", - "esmangle": "^1.0.0", - "nyc": "^11.0.0", - "remark-cli": "^4.0.0", - "remark-preset-wooorm": "^3.0.0", - "tape": "^4.4.0", - "xo": "^0.18.0" + "browserify": "^17.0.0", + "dtslint": "^4.0.0", + "nyc": "^15.0.0", + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "xo": "^0.35.0" }, "scripts": { - "build-md": "remark . -qfo", - "build-bundle": "browserify index.js -s VFile > vfile.js", - "build-mangle": "esmangle vfile.js > vfile.min.js", - "build": "npm run build-md && npm run build-bundle && npm run build-mangle", - "lint": "xo", + "format": "remark . -qfo && prettier . --write && xo --fix", + "build-bundle": "browserify . -s VFile -o vfile.js", + "build-mangle": "browserify . 
-s VFile -o vfile.min.js -p tinyify", + "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", - "test": "npm run build && npm run lint && npm run test-coverage" + "test-types": "dtslint types", + "test": "npm run format && npm run build && npm run test-coverage && npm run test-types" }, "nyc": { "check-coverage": true, @@ -60,19 +79,37 @@ "functions": 100, "branches": 100 }, + "prettier": { + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "bracketSpacing": false, + "semi": false, + "trailingComma": "none" + }, "xo": { - "space": true, + "prettier": true, "esnext": false, - "rules": { - "unicorn/no-new-buffer": "off" - }, "ignores": [ + "types", "vfile.js" - ] + ], + "rules": { + "unicorn/explicit-length-check": "off", + "unicorn/prefer-includes": "off", + "unicorn/prefer-reflect-apply": "off", + "unicorn/prefer-number-properties": "off", + "max-depth": "off", + "complexity": "off" + } }, "remarkConfig": { "plugins": [ - "preset-wooorm" + "preset-wooorm", + [ + "lint-no-html", + false + ] ] } } diff --git a/node_modules/vfile/readme.md b/node_modules/vfile/readme.md index 1488031d..739e7ebe 100644 --- a/node_modules/vfile/readme.md +++ b/node_modules/vfile/readme.md @@ -1,82 +1,92 @@ -# ![vfile][] - -[![Build Status][build-badge]][build-status] -[![Coverage Status][coverage-badge]][coverage-status] - -**VFile** is a virtual file format used by [**unified**][unified], -a text processing umbrella (it powers [**retext**][retext] for -natural language, [**remark**][remark] for markdown, and -[**rehype**][rehype] for HTML). Each processors that parse, transform, -and compile text, and need a virtual representation of files and a -place to store [messages][] about them. Plus, they work in the browser. -**VFile** provides these requirements at a small size, in IE 9 and up. - -> **VFile** is different from the excellent [**vinyl**][vinyl] -> in that it has a smaller API, a smaller size, and focuses on -> [messages][]. - -VFile can be used anywhere where files need a lightweight representation. -For example, it’s used in: - -* [`documentation`](https://github.com/documentationjs/documentation) - — The documentation system for modern JavaScript -* [`weh`](https://github.com/wehjs/weh) - — Declarative small site generator -* [`geojsonhint`](https://github.com/mapbox/geojsonhint) - — Complete, fast, standards-based validation for geojson +<h1> + <img src="https://raw.githubusercontent.com/vfile/vfile/7e1e6a6/logo.svg?sanitize=true" alt="vfile" width="400" /> +</h1> + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] +[![Sponsors][sponsors-badge]][collective] +[![Backers][backers-badge]][collective] +[![Chat][chat-badge]][chat] + +**vfile** is a small and browser friendly virtual file format that tracks +metadata (such as a file’s `path` and `contents`) and [messages][]. + +It was made specifically for **[unified][]** and generally for the common task +of parsing, transforming, and serializing data, where `vfile` handles everything +about the document being compiled. +This is useful for example when building linters, compilers, static site +generators, or other build tools. +**vfile** is part of the [unified collective][site]. 
+ +* for updates, see [Twitter][] +* for more about us, see [`unifiedjs.com`][site] +* for questions, see [Discussions][chat] +* to help, see [contribute][] or [sponsor][] below + +> **vfile** is different from the excellent [`vinyl`][vinyl] in that it has +> a smaller API, a smaller size, and focuses on [messages][]. + +## Contents + +* [Install](#install) +* [Use](#use) +* [API](#api) + * [`VFile(options?)`](#vfileoptions) + * [`vfile.contents`](#vfilecontents) + * [`vfile.cwd`](#vfilecwd) + * [`vfile.path`](#vfilepath) + * [`vfile.basename`](#vfilebasename) + * [`vfile.stem`](#vfilestem) + * [`vfile.extname`](#vfileextname) + * [`vfile.dirname`](#vfiledirname) + * [`vfile.history`](#vfilehistory) + * [`vfile.messages`](#vfilemessages) + * [`vfile.data`](#vfiledata) + * [`VFile#toString(encoding?)`](#vfiletostringencoding) + * [`VFile#message(reason[, position][, origin])`](#vfilemessagereason-position-origin) + * [`VFile#info(reason[, position][, origin])`](#vfileinforeason-position-origin) + * [`VFile#fail(reason[, position][, origin])`](#vfilefailreason-position-origin) +* [List of utilities](#list-of-utilities) +* [Reporters](#reporters) +* [Contribute](#contribute) +* [Sponsor](#sponsor) +* [Acknowledgments](#acknowledgments) +* [License](#license) -## Installation +## Install [npm][]: -```bash +```sh npm install vfile ``` -## Table of Contents - -* [Usage](#usage) -* [Utilities](#utilities) -* [Reporters](#reporters) -* [API](#api) - * [VFile(\[options\])](#vfileoptions) - * [vfile.contents](#vfilecontents) - * [vfile.cwd](#vfilecwd) - * [vfile.path](#vfilepath) - * [vfile.basename](#vfilebasename) - * [vfile.stem](#vfilestem) - * [vfile.extname](#vfileextname) - * [vfile.dirname](#vfiledirname) - * [vfile.history](#vfilehistory) - * [vfile.messages](#vfilemessages) - * [vfile.data](#vfiledata) - * [VFile#toString(\[encoding\])](#vfiletostringencoding) - * [VFile#message(reason\[, position\]\[, origin\])](#vfilemessagereason-position-origin) - * [VFile#info(reason\[, position\]\[, origin\])](#vfileinforeason-position-origin) - * [VFile#fail(reason\[, position\]\[, origin\])](#vfilefailreason-position-origin) -* [License](#license) - -## Usage +## Use ```js -var vfile = require('vfile'); +var vfile = require('vfile') -var file = vfile({path: '~/example.txt', contents: 'Alpha *braavo* charlie.'}); +var file = vfile({path: '~/example.txt', contents: 'Alpha *braavo* charlie.'}) -file.path; //=> '~/example.txt' -file.dirname; //=> '~' +file.path // => '~/example.txt' +file.dirname // => '~' -file.extname = '.md'; +file.extname = '.md' -file.basename; //=> 'example.md' +file.basename // => 'example.md' -file.basename = 'index.text'; +file.basename = 'index.text' -file.history; //=> ['~/example.txt', '~/example.md', '~/index.text'] +file.history // => ['~/example.txt', '~/example.md', '~/index.text'] -file.message('`braavo` is misspelt; did you mean `bravo`?', {line: 1, column: 8}); +file.message('`braavo` is misspelt; did you mean `bravo`?', { + line: 1, + column: 8 +}) -console.log(file.messages); +console.log(file.messages) ``` Yields: @@ -95,70 +105,30 @@ Yields: fatal: false } ] ``` -## Utilities - -The following list of projects includes tools for working with virtual -files. See [**Unist**][unist] for projects working with nodes. 
- -* [`convert-vinyl-to-vfile`](https://github.com/dustinspecker/convert-vinyl-to-vfile) - — Convert from [Vinyl][] -* [`is-vfile-message`](https://github.com/shinnn/is-vfile-message) - — Check if a value is a `VMessage` object -* [`to-vfile`](https://github.com/vfile/to-vfile) - — Create a virtual file from a file-path (and optionally read it) -* [`vfile-find-down`](https://github.com/vfile/vfile-find-down) - — Find files by searching the file system downwards -* [`vfile-find-up`](https://github.com/vfile/vfile-find-up) - — Find files by searching the file system upwards -* [`vfile-location`](https://github.com/vfile/vfile-location) - — Convert between line/column- and range-based locations -* [`vfile-statistics`](https://github.com/vfile/vfile-statistics) - — Count messages per category -* [`vfile-messages-to-vscode-diagnostics`](https://github.com/shinnn/vfile-messages-to-vscode-diagnostics) - — Convert to VS Code diagnostics -* [`vfile-sort`](https://github.com/vfile/vfile-sort) - — Sort messages by line/column -* [`vfile-to-eslint`](https://github.com/vfile/vfile-to-eslint) - — Convert VFiles to ESLint formatter compatible output - -## Reporters - -The following list of projects show linting results for given virtual files. -Reporters _must_ accept `Array.<VFile>` as their first argument, and return -`string`. Reporters _may_ accept other values too, in which case it’s suggested -to stick to `vfile-reporter`s interface. - -* [`vfile-reporter`][reporter] - — Stylish reporter -* [`vfile-reporter-json`](https://github.com/vfile/vfile-reporter-json) - — JSON reporter -* [`vfile-reporter-pretty`](https://github.com/vfile/vfile-reporter-pretty) - — Pretty reporter - ## API -### `VFile([options])` +### `VFile(options?)` -Create a new virtual file. If `options` is `string` or `Buffer`, treats -it as `{contents: options}`. If `options` is a `VFile`, returns it. +Create a new virtual file. +If `options` is `string` or `Buffer`, treats it as `{contents: options}`. +If `options` is a `VFile`, returns it. All other options are set on the newly created `vfile`. -Path related properties are set in the following order (least specific -to most specific): `history`, `path`, `basename`, `stem`, `extname`, -`dirname`. +Path related properties are set in the following order (least specific to most +specific): `history`, `path`, `basename`, `stem`, `extname`, `dirname`. -It’s not possible to set either `dirname` or `extname` without setting -either `history`, `path`, `basename`, or `stem` as well. +It’s not possible to set either `dirname` or `extname` without setting either +`history`, `path`, `basename`, or `stem` as well. ###### Example ```js -vfile(); -vfile('console.log("alpha");'); -vfile(Buffer.from('exit 1')); -vfile({path: path.join(__dirname, 'readme.md')}); -vfile({stem: 'readme', extname: '.md', dirname: __dirname}); -vfile({other: 'properties', are: 'copied', ov: {e: 'r'}}); +vfile() +vfile('console.log("alpha");') +vfile(Buffer.from('exit 1')) +vfile({path: path.join(__dirname, 'readme.md')}) +vfile({stem: 'readme', extname: '.md', dirname: __dirname}) +vfile({other: 'properties', are: 'copied', ov: {e: 'r'}}) ``` ### `vfile.contents` @@ -167,32 +137,34 @@ vfile({other: 'properties', are: 'copied', ov: {e: 'r'}}); ### `vfile.cwd` -`string` — Base of `path`. Defaults to `process.cwd()`. +`string` — Base of `path`. +Defaults to `process.cwd()`. ### `vfile.path` -`string?` — Path of `vfile`. Cannot be nullified. +`string?` — Path of `vfile`. +Cannot be nullified. 
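The constructor rules above combine several behaviors: a `string` or `Buffer` is treated as `{contents: options}`, an existing `VFile` is returned unchanged, and path fields are applied least specific first. A minimal sketch of that behavior, assuming the vfile 4.x API as documented in this readme:

```js
var vfile = require('vfile')

// A string (or Buffer) becomes the file's contents.
var doc = vfile('# Hi')
console.log(doc.contents) // => '# Hi'

// An existing VFile is returned as-is, not copied.
console.log(vfile(doc) === doc) // => true

// Path fields apply least-specific first, so `extname` here
// overrides the extension taken from `path`.
var readme = vfile({path: 'docs/readme.txt', extname: '.md'})
console.log(readme.path) // => 'docs/readme.md'
```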
### `vfile.basename` -`string?` — Current name (including extension) of `vfile`. Cannot -contain path separators. Cannot be nullified either (use -`file.path = file.dirname` instead). +`string?` — Current name (including extension) of `vfile`. +Cannot contain path separators. +Cannot be nullified either (use `file.path = file.dirname` instead). ### `vfile.stem` -`string?` — Name (without extension) of `vfile`. Cannot be nullified, -and cannot contain path separators. +`string?` — Name (without extension) of `vfile`. +Cannot be nullified, and cannot contain path separators. ### `vfile.extname` -`string?` — Extension (with dot) of `vfile`. Cannot be set if -there’s no `path` yet and cannot contain path separators. +`string?` — Extension (with dot) of `vfile`. +Cannot be set if there’s no `path` yet and cannot contain path separators. ### `vfile.dirname` -`string?` — Path to parent directory of `vfile`. Cannot be set if -there’s no `path` yet. +`string?` — Path to parent directory of `vfile`. +Cannot be set if there’s no `path` yet. ### `vfile.history` @@ -204,14 +176,16 @@ there’s no `path` yet. ### `vfile.data` -`Object` — Place to store custom information. It’s OK to store custom -data directly on the `vfile`, moving it to `data` gives a _little_ more -privacy. +`Object` — Place to store custom information. +It’s OK to store custom data directly on the `vfile`, moving it to `data` gives +a *little* more privacy. -### `VFile#toString([encoding])` +### `VFile#toString(encoding?)` -Convert contents of `vfile` to string. If `contents` is a buffer, -`encoding` is used to stringify buffers (default: `'utf8'`). +Convert contents of `vfile` to string. +When `contents` is a [`Buffer`][buffer], `encoding` is a +[character encoding][encoding] to understand `doc` as (`string`, default: +`'utf8'`). ### `VFile#message(reason[, position][, origin])` @@ -226,7 +200,8 @@ Constructs a new [`VMessage`][vmessage] and adds it to ### `VFile#info(reason[, position][, origin])` Associates an informational message with the file, where `fatal` is set to -`null`. Calls [`#message()`][message] internally. +`null`. +Calls [`#message()`][message] internally. ##### Returns @@ -242,38 +217,196 @@ Calls [`#message()`][message] internally. [`VMessage`][vmessage]. +<a name="utilities"></a> + +## List of utilities + +The following list of projects includes tools for working with virtual files. +See **[unist][]** for projects that work with nodes. 
+ +* [`convert-vinyl-to-vfile`](https://github.com/dustinspecker/convert-vinyl-to-vfile) + — transform from [Vinyl][] to vfile +* [`to-vfile`](https://github.com/vfile/to-vfile) + — create a vfile from a filepath +* [`vfile-find-down`](https://github.com/vfile/vfile-find-down) + — find files by searching the file system downwards +* [`vfile-find-up`](https://github.com/vfile/vfile-find-up) + — find files by searching the file system upwards +* [`vfile-glob`](https://github.com/shinnn/vfile-glob) + — find files by glob patterns +* [`vfile-is`](https://github.com/vfile/vfile-is) + — check if a vfile passes a test +* [`vfile-location`](https://github.com/vfile/vfile-location) + — convert between positional and offset locations +* [`vfile-matter`](https://github.com/vfile/vfile-matter) + — parse the YAML front matter +* [`vfile-message`](https://github.com/vfile/vfile-message) + — create a vfile message +* [`vfile-messages-to-vscode-diagnostics`](https://github.com/shinnn/vfile-messages-to-vscode-diagnostics) + — transform vfile messages to VS Code diagnostics +* [`vfile-mkdirp`](https://github.com/vfile/vfile-mkdirp) + — make sure the directory of a vfile exists on the file system +* [`vfile-rename`](https://github.com/vfile/vfile-rename) + — rename the path parts of a vfile +* [`vfile-sort`](https://github.com/vfile/vfile-sort) + — sort messages by line/column +* [`vfile-statistics`](https://github.com/vfile/vfile-statistics) + — count messages per category: failures, warnings, etc +* [`vfile-to-eslint`](https://github.com/vfile/vfile-to-eslint) + — convert to ESLint formatter compatible output + +## Reporters + +The following list of projects show linting results for given virtual files. +Reporters *must* accept `Array.<VFile>` as their first argument, and return +`string`. +Reporters *may* accept other values too, in which case it’s suggested to stick +to `vfile-reporter`s interface. + +* [`vfile-reporter`][reporter] + — create a report +* [`vfile-reporter-json`](https://github.com/vfile/vfile-reporter-json) + — create a JSON report +* [`vfile-reporter-folder-json`](https://github.com/vfile/vfile-reporter-folder-json) + — create a JSON representation of vfiles +* [`vfile-reporter-pretty`](https://github.com/vfile/vfile-reporter-pretty) + — create a pretty report +* [`vfile-reporter-junit`](https://github.com/kellyselden/vfile-reporter-junit) + — create a jUnit report +* [`vfile-reporter-position`](https://github.com/Hocdoc/vfile-reporter-position) + — create a report with content excerpts + +## Contribute + +See [`contributing.md`][contributing] in [`vfile/.github`][health] for ways to +get started. +See [`support.md`][support] for ways to get help. +Ideas for new utilities and tools can be posted in [`vfile/ideas`][ideas]. + +This project has a [code of conduct][coc]. +By interacting with this repository, organization, or community you agree to +abide by its terms. + +## Sponsor + +Support this effort and give back by sponsoring on [OpenCollective][collective]! 
+ +<table> +<tr valign="middle"> +<td width="20%" align="center" colspan="2"> + <a href="https://www.gatsbyjs.org">Gatsby</a> 🥇<br><br> + <a href="https://www.gatsbyjs.org"><img src="https://avatars1.githubusercontent.com/u/12551863?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://vercel.com">Vercel</a> 🥇<br><br> + <a href="https://vercel.com"><img src="https://avatars1.githubusercontent.com/u/14985020?s=256&v=4" width="128"></a> +</td> +<td width="20%" align="center" colspan="2"> + <a href="https://www.netlify.com">Netlify</a><br><br> + <!--OC has a sharper image--> + <a href="https://www.netlify.com"><img src="https://images.opencollective.com/netlify/4087de2/logo/256.png" width="128"></a> +</td> +<td width="10%" align="center"> + <a href="https://www.holloway.com">Holloway</a><br><br> + <a href="https://www.holloway.com"><img src="https://avatars1.githubusercontent.com/u/35904294?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://themeisle.com">ThemeIsle</a><br><br> + <a href="https://themeisle.com"><img src="https://avatars1.githubusercontent.com/u/58979018?s=128&v=4" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://boosthub.io">Boost Hub</a><br><br> + <a href="https://boosthub.io"><img src="https://images.opencollective.com/boosthub/6318083/logo/128.png" width="64"></a> +</td> +<td width="10%" align="center"> + <a href="https://expo.io">Expo</a><br><br> + <a href="https://expo.io"><img src="https://avatars1.githubusercontent.com/u/12504344?s=128&v=4" width="64"></a> +</td> +</tr> +<tr valign="middle"> +<td width="100%" align="center" colspan="10"> + <br> + <a href="https://opencollective.com/unified"><strong>You?</strong></a> + <br><br> +</td> +</tr> +</table> + +## Acknowledgments + +The initial release of this project was authored by +[**@wooorm**](https://github.com/wooorm). + +Thanks to [**@contra**](https://github.com/contra), +[**@phated**](https://github.com/phated), and others for their work on +[Vinyl][], which was a huge inspiration. + +Thanks to +[**@brendo**](https://github.com/brendo), +[**@shinnn**](https://github.com/shinnn), +[**@KyleAMathews**](https://github.com/KyleAMathews), +[**@sindresorhus**](https://github.com/sindresorhus), and +[**@denysdovhan**](https://github.com/denysdovhan) +for contributing commits since! 
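The `VFile#message()`, `#info()`, and `#fail()` methods documented in the API section above differ mainly in the `fatal` flag and in whether they throw. A small sketch, assuming vfile 4.x as described above:

```js
var vfile = require('vfile')

var file = vfile({path: 'example.md', contents: '# Hello'})

// `message()` adds a warning: `fatal` is `false` by default.
file.message('Title could be more descriptive', {line: 1, column: 3})

// `info()` adds a message with `fatal` set to `null`.
file.info('Parsed without problems')

// `fail()` marks the message as fatal and throws it immediately,
// so it is typically wrapped in a try/catch.
try {
  file.fail('Missing front matter')
} catch (error) {
  console.log(error.fatal) // => true
}

console.log(file.messages.length) // => 3
```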
+ ## License [MIT][license] © [Titus Wormer][author] <!-- Definitions --> -[build-badge]: https://img.shields.io/travis/vfile/vfile.svg +[build-badge]: https://github.com/vfile/vfile/workflows/main/badge.svg -[build-status]: https://travis-ci.org/vfile/vfile +[build]: https://github.com/vfile/vfile/actions [coverage-badge]: https://img.shields.io/codecov/c/github/vfile/vfile.svg -[coverage-status]: https://codecov.io/github/vfile/vfile +[coverage]: https://codecov.io/github/vfile/vfile + +[downloads-badge]: https://img.shields.io/npm/dm/vfile.svg + +[downloads]: https://www.npmjs.com/package/vfile + +[size-badge]: https://img.shields.io/bundlephobia/minzip/vfile.svg + +[size]: https://bundlephobia.com/result?p=vfile + +[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg + +[backers-badge]: https://opencollective.com/unified/backers/badge.svg + +[collective]: https://opencollective.com/unified + +[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg + +[chat]: https://github.com/vfile/vfile/discussions [npm]: https://docs.npmjs.com/cli/install -[license]: LICENSE +[contributing]: https://github.com/vfile/.github/blob/HEAD/contributing.md -[author]: http://wooorm.com +[support]: https://github.com/vfile/.github/blob/HEAD/support.md -[vfile]: https://cdn.rawgit.com/vfile/vfile/f65510e/logo.svg +[health]: https://github.com/vfile/.github -[unified]: https://github.com/unifiedjs/unified +[coc]: https://github.com/vfile/.github/blob/HEAD/code-of-conduct.md -[retext]: https://github.com/wooorm/retext +[license]: license -[remark]: https://github.com/wooorm/remark +[author]: https://wooorm.com -[rehype]: https://github.com/wooorm/rehype +[unified]: https://github.com/unifiedjs/unified [vinyl]: https://github.com/gulpjs/vinyl +[site]: https://unifiedjs.com + +[twitter]: https://twitter.com/unifiedjs + +[contribute]: #contribute + +[sponsor]: #sponsor + [unist]: https://github.com/syntax-tree/unist#list-of-utilities [reporter]: https://github.com/vfile/vfile-reporter @@ -283,3 +416,9 @@ Calls [`#message()`][message] internally. [messages]: #vfilemessages [message]: #vfilemessagereason-position-origin + +[ideas]: https://github.com/vfile/ideas + +[encoding]: https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + +[buffer]: https://nodejs.org/api/buffer.html diff --git a/node_modules/vfile/types/index.d.ts b/node_modules/vfile/types/index.d.ts new file mode 100644 index 00000000..e0d476e2 --- /dev/null +++ b/node_modules/vfile/types/index.d.ts @@ -0,0 +1,161 @@ +// TypeScript Version: 3.0 + +import * as Unist from 'unist' +import * as vfileMessage from 'vfile-message' + +declare namespace vfile { + /** + * Encodings supported by the buffer class + * + * @remarks + * This is a copy of the typing from Node, copied to prevent Node globals from being needed. + * Copied from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/a2bc1d868d81733a8969236655fa600bd3651a7b/types/node/globals.d.ts#L174 + */ + type BufferEncoding = + | 'ascii' + | 'utf8' + | 'utf-8' + | 'utf16le' + | 'ucs2' + | 'ucs-2' + | 'base64' + | 'latin1' + | 'binary' + | 'hex' + + /** + * VFileContents can either be text, or a Buffer like structure + * @remarks + * This does not directly use type `Buffer, because it can also be used in a browser context. + * Instead this leverages `Uint8Array` which is the base type for `Buffer`, and a native JavaScript construct. 
+ */ + type VFileContents = string | Uint8Array + type VFileCompatible = VFile | VFileOptions | VFileContents + interface Settings { + [key: string]: unknown + } + type VFileReporter<T = Settings> = (files: VFile[], options: T) => string + + interface VFileOptions { + contents?: VFileContents + path?: string + basename?: string + stem?: string + extname?: string + dirname?: string + cwd?: string + data?: any + [key: string]: any + } + + interface VFile { + /** + * Create a new virtual file. If `options` is `string` or `Buffer`, treats it as `{contents: options}`. + * If `options` is a `VFile`, returns it. All other options are set on the newly created `vfile`. + * + * Path related properties are set in the following order (least specific to most specific): `history`, `path`, `basename`, `stem`, `extname`, `dirname`. + * + * It’s not possible to set either `dirname` or `extname` without setting either `history`, `path`, `basename`, or `stem` as well. + * + * @param options If `options` is `string` or `Buffer`, treats it as `{contents: options}`. If `options` is a `VFile`, returns it. All other options are set on the newly created `vfile`. + */ + <F extends VFile>(input?: VFileContents | F | VFileOptions): F + /** + * List of file-paths the file moved between. + */ + history: string[] + /** + * Place to store custom information. + * It's OK to store custom data directly on the `vfile`, moving it to `data` gives a little more privacy. + */ + data: unknown + /** + * List of messages associated with the file. + */ + messages: vfileMessage.VFileMessage[] + /** + * Raw value. + */ + contents: VFileContents + /** + * Path of `vfile`. + * Cannot be nullified. + */ + path?: string + /** + * Path to parent directory of `vfile`. + * Cannot be set if there's no `path` yet. + */ + dirname?: string + /** + * Current name (including extension) of `vfile`. + * Cannot contain path separators. + * Cannot be nullified either (use `file.path = file.dirname` instead). + */ + basename?: string + /** + * Name (without extension) of `vfile`. + * Cannot be nullified, and cannot contain path separators. + */ + stem?: string + /** + * Extension (with dot) of `vfile`. + * Cannot be set if there's no `path` yet and cannot contain path separators. + */ + extname?: string + /** + * Base of `path`. + * Defaults to `process.cwd()`. + */ + cwd: string + /** + * Convert contents of `vfile` to string. + * @param encoding If `contents` is a buffer, `encoding` is used to stringify buffers (default: `'utf8'`). + */ + toString: (encoding?: BufferEncoding) => string + /** + * Associates a message with the file for `reason` at `position`. + * When an error is passed in as `reason`, copies the stack. + * Each message has a `fatal` property which by default is set to `false` (ie. `warning`). + * @param reason Reason for message. Uses the stack and message of the error if given. + * @param position Place at which the message occurred in `vfile`. + * @param ruleId Category of message. + */ + message: ( + reason: string, + position?: Unist.Point | Unist.Position | Unist.Node, + ruleId?: string + ) => vfileMessage.VFileMessage + /** + * Associates a fatal message with the file, then immediately throws it. + * Note: fatal errors mean a file is no longer processable. + * Calls `message()` internally. + * @param reason Reason for message. Uses the stack and message of the error if given. + * @param position Place at which the message occurred in `vfile`. + * @param ruleId Category of message. 
+ */ + fail: ( + reason: string, + position?: Unist.Point | Unist.Position | Unist.Node, + ruleId?: string + ) => never + /** + * Associates an informational message with the file, where `fatal` is set to `null`. + * Calls `message()` internally. + * @param reason Reason for message. Uses the stack and message of the error if given. + * @param position Place at which the message occurred in `vfile`. + * @param ruleId Category of message. + */ + info: ( + reason: string, + position?: Unist.Point | Unist.Position | Unist.Node, + ruleId?: string + ) => vfileMessage.VFileMessage + + [key: string]: unknown + } +} + +declare const vfile: vfile.VFile + +export = vfile diff --git a/node_modules/x-is-string/.npmignore b/node_modules/x-is-string/.npmignore deleted file mode 100644 index 8363b8e3..00000000 --- a/node_modules/x-is-string/.npmignore +++ /dev/null @@ -1,16 +0,0 @@ -.DS_Store -.monitor -.*.swp -.nodemonignore -releases -*.log -*.err -fleet.json -public/browserify -bin/*.json -.bin -build -compile -.lock-wscript -coverage -node_modules diff --git a/node_modules/x-is-string/.travis.yml b/node_modules/x-is-string/.travis.yml deleted file mode 100644 index 45734f2e..00000000 --- a/node_modules/x-is-string/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: node_js -node_js: - - 0.8 - - "0.10" -before_script: - - npm install - - npm install istanbul coveralls -script: npm run travis-test diff --git a/node_modules/x-is-string/LICENCE b/node_modules/x-is-string/LICENCE deleted file mode 100644 index 0d083405..00000000 --- a/node_modules/x-is-string/LICENCE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2014 Matt-Esch. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
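The `types/index.d.ts` entry added above also documents `toString(encoding?)` and the free-form `data` field; a brief sketch of how those look from plain JavaScript, again assuming the vfile 4.x API:

```js
var vfile = require('vfile')

// When `contents` is a Buffer, `toString` decodes it using the given
// character encoding (default: 'utf8').
var file = vfile({path: 'note.txt', contents: Buffer.from('héllo')})
console.log(file.toString('utf8')) // => 'héllo'

// `data` is an object reserved for custom metadata.
file.data.reviewed = true
console.log(file.data.reviewed) // => true
```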
diff --git a/node_modules/x-is-string/README.md b/node_modules/x-is-string/README.md deleted file mode 100644 index 99977d47..00000000 --- a/node_modules/x-is-string/README.md +++ /dev/null @@ -1,46 +0,0 @@ -# x-is-string - -Simple string test - -## Example - -```js -var isString = require("x-is-string") - -isString("hello") -// -> true - -isString("") -// -> true - -isString(new String("things")) -// -> true - -isString(1) -// -> false - -isString(true) -// -> false - -isString(new Date()) -// -> false - -isString({}) -// -> false - -isString(null) -// -> false - -isString(undefined) -// -> false -``` - -## Installation - -`npm install x-is-string` - -## Contributors - - - Matt-Esch - -## MIT Licenced \ No newline at end of file diff --git a/node_modules/x-is-string/index.js b/node_modules/x-is-string/index.js deleted file mode 100644 index 090130d4..00000000 --- a/node_modules/x-is-string/index.js +++ /dev/null @@ -1,7 +0,0 @@ -var toString = Object.prototype.toString - -module.exports = isString - -function isString(obj) { - return toString.call(obj) === "[object String]" -} diff --git a/node_modules/x-is-string/package.json b/node_modules/x-is-string/package.json deleted file mode 100644 index ea267ce3..00000000 --- a/node_modules/x-is-string/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "x-is-string", - "version": "0.1.0", - "description": "Simple string test", - "keywords": [], - "author": "Matt-Esch <matt@mattesch.info>", - "repository": "git://github.com/Matt-Esch/x-is-string.git", - "main": "index", - "homepage": "https://github.com/Matt-Esch/x-is-string", - "contributors": [ - { - "name": "Matt-Esch" - } - ], - "bugs": { - "url": "https://github.com/Matt-Esch/x-is-string/issues", - "email": "matt@mattesch.info" - }, - "dependencies": {}, - "devDependencies": { - "tape": "^2.12.2" - }, - "licenses": [ - { - "type": "MIT", - "url": "http://github.com/Matt-Esch/x-is-string/raw/master/LICENSE" - } - ], - "scripts": { - "test": "node ./test/index.js", - "start": "node ./index.js", - "watch": "nodemon -w ./index.js index.js", - "travis-test": "istanbul cover ./test/index.js && ((cat coverage/lcov.info | coveralls) || exit 0)", - "cover": "istanbul cover --report none --print detail ./test/index.js", - "view-cover": "istanbul report html && google-chrome ./coverage/index.html", - "test-browser": "testem-browser ./test/browser/index.js", - "testem": "testem-both -b=./test/browser/index.js" - }, - "testling": { - "files": "test/index.js", - "browsers": [ - "ie/8..latest", - "firefox/16..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - } -} diff --git a/node_modules/x-is-string/test/index.js b/node_modules/x-is-string/test/index.js deleted file mode 100644 index 7caa5ab4..00000000 --- a/node_modules/x-is-string/test/index.js +++ /dev/null @@ -1,51 +0,0 @@ -var test = require("tape") - -var isString = require("../index") - -test("isString is a function", function (assert) { - assert.equal(typeof isString, "function") - assert.end() -}) - -test("string literal is truthy", function (assert) { - assert.equal(isString("hello"), true) - assert.end() -}) - -test("empty string is truthy", function (assert) { - assert.equal(isString(""), true) - assert.end() -}) - -test("string object is truthy", function (assert) { - assert.equal(isString(new String("hello")), true) - assert.end() -}) - -test("number is falsey", function 
(assert) { - assert.equal(isString(9), false) - assert.end() -}) - -test("boolean is falsey", function (assert) { - assert.equal(isString(true), false) - assert.end() -}) - -test("date is falsey", function (assert) { - assert.equal(isString(new Date()), false) - assert.end() -}) - -test("object is falsey", function (assert) { - assert.equal(isString({}), false) - assert.end() -}) -test("null is falsey", function (assert) { - assert.equal(isString(null), false) - assert.end() -}) -test("undefined is falsey", function (assert) { - assert.equal(isString(undefined), false) - assert.end() -}) diff --git a/node_modules/xtend/.jshintrc b/node_modules/xtend/.jshintrc deleted file mode 100644 index 77887b5f..00000000 --- a/node_modules/xtend/.jshintrc +++ /dev/null @@ -1,30 +0,0 @@ -{ - "maxdepth": 4, - "maxstatements": 200, - "maxcomplexity": 12, - "maxlen": 80, - "maxparams": 5, - - "curly": true, - "eqeqeq": true, - "immed": true, - "latedef": false, - "noarg": true, - "noempty": true, - "nonew": true, - "undef": true, - "unused": "vars", - "trailing": true, - - "quotmark": true, - "expr": true, - "asi": true, - - "browser": false, - "esnext": true, - "devel": false, - "node": false, - "nonstandard": false, - - "predef": ["require", "module", "__dirname", "__filename"] -} diff --git a/node_modules/xtend/LICENSE b/node_modules/xtend/LICENSE deleted file mode 100644 index 0099f4f6..00000000 --- a/node_modules/xtend/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) -Copyright (c) 2012-2014 Raynos. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/xtend/README.md b/node_modules/xtend/README.md deleted file mode 100644 index 4a2703cf..00000000 --- a/node_modules/xtend/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# xtend - -[![browser support][3]][4] - -[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) - -Extend like a boss - -xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. - -## Examples - -```js -var extend = require("xtend") - -// extend returns a new object. 
Does not mutate arguments -var combination = extend({ - a: "a", - b: "c" -}, { - b: "b" -}) -// { a: "a", b: "b" } -``` - -## Stability status: Locked - -## MIT Licensed - - - [3]: http://ci.testling.com/Raynos/xtend.png - [4]: http://ci.testling.com/Raynos/xtend diff --git a/node_modules/xtend/immutable.js b/node_modules/xtend/immutable.js deleted file mode 100644 index 94889c9d..00000000 --- a/node_modules/xtend/immutable.js +++ /dev/null @@ -1,19 +0,0 @@ -module.exports = extend - -var hasOwnProperty = Object.prototype.hasOwnProperty; - -function extend() { - var target = {} - - for (var i = 0; i < arguments.length; i++) { - var source = arguments[i] - - for (var key in source) { - if (hasOwnProperty.call(source, key)) { - target[key] = source[key] - } - } - } - - return target -} diff --git a/node_modules/xtend/mutable.js b/node_modules/xtend/mutable.js deleted file mode 100644 index 72debede..00000000 --- a/node_modules/xtend/mutable.js +++ /dev/null @@ -1,17 +0,0 @@ -module.exports = extend - -var hasOwnProperty = Object.prototype.hasOwnProperty; - -function extend(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] - - for (var key in source) { - if (hasOwnProperty.call(source, key)) { - target[key] = source[key] - } - } - } - - return target -} diff --git a/node_modules/xtend/package.json b/node_modules/xtend/package.json deleted file mode 100644 index f7a39d10..00000000 --- a/node_modules/xtend/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "xtend", - "version": "4.0.2", - "description": "extend like a boss", - "keywords": [ - "extend", - "merge", - "options", - "opts", - "object", - "array" - ], - "author": "Raynos <raynos2@gmail.com>", - "repository": "git://github.com/Raynos/xtend.git", - "main": "immutable", - "scripts": { - "test": "node test" - }, - "dependencies": {}, - "devDependencies": { - "tape": "~1.1.0" - }, - "homepage": "https://github.com/Raynos/xtend", - "contributors": [ - { - "name": "Jake Verbaten" - }, - { - "name": "Matt Esch" - } - ], - "bugs": { - "url": "https://github.com/Raynos/xtend/issues", - "email": "raynos2@gmail.com" - }, - "license": "MIT", - "testling": { - "files": "test.js", - "browsers": [ - "ie/7..latest", - "firefox/16..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest" - ] - }, - "engines": { - "node": ">=0.4" - } -} diff --git a/node_modules/xtend/test.js b/node_modules/xtend/test.js deleted file mode 100644 index b895b42b..00000000 --- a/node_modules/xtend/test.js +++ /dev/null @@ -1,103 +0,0 @@ -var test = require("tape") -var extend = require("./") -var mutableExtend = require("./mutable") - -test("merge", function(assert) { - var a = { a: "foo" } - var b = { b: "bar" } - - assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) - assert.end() -}) - -test("replace", function(assert) { - var a = { a: "foo" } - var b = { a: "bar" } - - assert.deepEqual(extend(a, b), { a: "bar" }) - assert.end() -}) - -test("undefined", function(assert) { - var a = { a: undefined } - var b = { b: "foo" } - - assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) - assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) - assert.end() -}) - -test("handle 0", function(assert) { - var a = { a: "default" } - var b = { a: 0 } - - assert.deepEqual(extend(a, b), { a: 0 }) - assert.deepEqual(extend(b, a), { a: "default" }) - assert.end() -}) - -test("is immutable", function (assert) { - var 
record = {} - - extend(record, { foo: "bar" }) - assert.equal(record.foo, undefined) - assert.end() -}) - -test("null as argument", function (assert) { - var a = { foo: "bar" } - var b = null - var c = void 0 - - assert.deepEqual(extend(b, a, c), { foo: "bar" }) - assert.end() -}) - -test("mutable", function (assert) { - var a = { foo: "bar" } - - mutableExtend(a, { bar: "baz" }) - - assert.equal(a.bar, "baz") - assert.end() -}) - -test("null prototype", function(assert) { - var a = { a: "foo" } - var b = Object.create(null) - b.b = "bar"; - - assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) - assert.end() -}) - -test("null prototype mutable", function (assert) { - var a = { foo: "bar" } - var b = Object.create(null) - b.bar = "baz"; - - mutableExtend(a, b) - - assert.equal(a.bar, "baz") - assert.end() -}) - -test("prototype pollution", function (assert) { - var a = {} - var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' - - assert.strictEqual(a.oops, undefined) - extend({}, maliciousPayload) - assert.strictEqual(a.oops, undefined) - assert.end() -}) - -test("prototype pollution mutable", function (assert) { - var a = {} - var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' - - assert.strictEqual(a.oops, undefined) - mutableExtend({}, maliciousPayload) - assert.strictEqual(a.oops, undefined) - assert.end() -}) diff --git a/node_modules/zwitch/index.js b/node_modules/zwitch/index.js new file mode 100644 index 00000000..c6064901 --- /dev/null +++ b/node_modules/zwitch/index.js @@ -0,0 +1,28 @@ +'use strict' + +module.exports = factory + +var noop = Function.prototype +var own = {}.hasOwnProperty + +// Handle values based on a property. +function factory(key, options) { + var settings = options || {} + + function one(value) { + var fn = one.invalid + var handlers = one.handlers + + if (value && own.call(value, key)) { + fn = own.call(handlers, value[key]) ? handlers[value[key]] : one.unknown + } + + return (fn || noop).apply(this, arguments) + } + + one.handlers = settings.handlers || {} + one.invalid = settings.invalid + one.unknown = settings.unknown + + return one +} diff --git a/node_modules/is-whitespace-character/license b/node_modules/zwitch/license similarity index 100% rename from node_modules/is-whitespace-character/license rename to node_modules/zwitch/license diff --git a/node_modules/state-toggle/package.json b/node_modules/zwitch/package.json similarity index 74% rename from node_modules/state-toggle/package.json rename to node_modules/zwitch/package.json index 1111db06..d139f2dc 100644 --- a/node_modules/state-toggle/package.json +++ b/node_modules/zwitch/package.json @@ -1,15 +1,15 @@ { - "name": "state-toggle", - "version": "1.0.3", - "description": "Enter/exit a state", + "name": "zwitch", + "version": "1.0.5", + "description": "Handle values based on a property", "license": "MIT", "keywords": [ - "enter", - "exit", - "state" + "handle", + "switch", + "property" ], - "repository": "wooorm/state-toggle", - "bugs": "https://github.com/wooorm/state-toggle/issues", + "repository": "wooorm/zwitch", + "bugs": "https://github.com/wooorm/zwitch/issues", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -34,8 +34,8 @@ }, "scripts": { "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", - "build-bundle": "browserify . -s stateToggle -o state-toggle.js", - "build-mangle": "browserify . -s stateToggle -p tinyify -o state-toggle.min.js", + "build-bundle": "browserify . 
-s zwitch -o zwitch.js", + "build-mangle": "browserify . -s zwitch -p tinyify -o zwitch.min.js", "build": "npm run build-bundle && npm run build-mangle", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js", @@ -52,13 +52,11 @@ "xo": { "prettier": true, "esnext": false, + "rules": { + "unicorn/prefer-reflect-apply": "off" + }, "ignores": [ - "state-toggle.js" - ] - }, - "remarkConfig": { - "plugins": [ - "preset-wooorm" + "zwitch.js" ] }, "nyc": { @@ -66,5 +64,10 @@ "lines": 100, "functions": 100, "branches": 100 + }, + "remarkConfig": { + "plugins": [ + "preset-wooorm" + ] } } diff --git a/node_modules/zwitch/readme.md b/node_modules/zwitch/readme.md new file mode 100644 index 00000000..fb17a53a --- /dev/null +++ b/node_modules/zwitch/readme.md @@ -0,0 +1,143 @@ +# zwitch + +[![Build][build-badge]][build] +[![Coverage][coverage-badge]][coverage] +[![Downloads][downloads-badge]][downloads] +[![Size][size-badge]][size] + +Handle values based on a property. + +## Install + +[npm][]: + +```sh +npm install zwitch +``` + +## Use + +```js +var zwitch = require('zwitch') + +var handle = zwitch('type') + +handle.invalid = invalid +handle.unknown = unknown +handle.handlers.alpha = handle + +handle({type: 'alpha'}) +``` + +Or, with a `switch` statement: + +```javascript +function handle(value) { + var fn + + if (!value || typeof value !== 'object' || !('type' in value)) { + fn = invalid + } else { + switch (value.type) { + case 'alpha': + fn = handle + break + default: + fn = unknown + break + } + } + + return fn.apply(this, arguments) +} + +handle({type: 'alpha'}) +``` + +## API + +### `zwitch(key[, options])` + +Create a functional switch, based on a `key` (`string`). + +###### `options` + +Options can be omitted and added later to `one`. + +* `handlers` (`Object.<Function>`, optional) + — Object mapping values to handle, stored on `one.handlers` +* `invalid` (`Function`, optional) + — Handle values without `key`, stored on `one.invalid` +* `unknown` (`Function`, optional) + — Handle values with an unhandled `key`, stored on `one.unknown` + +###### Returns + +`Function` — See [`one`][one]. + +#### `one(value[, rest...])` + +Handle one value. Based on the bound `key`, a respective handler will be +invoked. +If `value` is not an object, or doesn’t have a `key` property, the special +“invalid” handler will be invoked. +If `value` has an unknown `key`, the special “unknown” handler will be invoked. + +All arguments, and the context object, are passed through to the [handler][], +and it’s result is returned. + +#### `one.handlers` + +Map of [handler][]s (`Object.<string, Function>`). + +#### `one.invalid` + +Special [`handler`][handler] invoked if a value doesn’t have a `key` property. +If not set, `undefined` is returned for invalid values. + +#### `one.unknown` + +Special [`handler`][handler] invoked if a value does not have a matching +handler. +If not set, `undefined` is returned for unknown values. + +### `function handler(value[, rest...])` + +Handle one value. 
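To make the `invalid`, `unknown`, and `handlers` pieces above concrete, here is a slightly fuller sketch than the Use section, assuming zwitch 1.x as documented in this readme (the handler names are illustrative):

```js
var zwitch = require('zwitch')

var handle = zwitch('type', {
  invalid: invalid, // value is missing the `type` key entirely
  unknown: unknown, // `type` exists but has no registered handler
  handlers: {alpha: alpha}
})

function alpha(value) {
  return 'handled: ' + value.type
}

function unknown(value) {
  return 'unknown type: ' + value.type
}

function invalid() {
  return 'invalid value'
}

console.log(handle({type: 'alpha'})) // => 'handled: alpha'
console.log(handle({type: 'beta'})) // => 'unknown type: beta'
console.log(handle(null)) // => 'invalid value'
```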
+ +## Related + +* [`mapz`](https://github.com/wooorm/mapz) + — Functional map + +## License + +[MIT][license] © [Titus Wormer][author] + +<!-- Definitions --> + +[build-badge]: https://img.shields.io/travis/wooorm/zwitch.svg + +[build]: https://travis-ci.org/wooorm/zwitch + +[coverage-badge]: https://img.shields.io/codecov/c/github/wooorm/zwitch.svg + +[coverage]: https://codecov.io/github/wooorm/zwitch + +[downloads-badge]: https://img.shields.io/npm/dm/zwitch.svg + +[downloads]: https://www.npmjs.com/package/zwitch + +[size-badge]: https://img.shields.io/bundlephobia/minzip/zwitch.svg + +[size]: https://bundlephobia.com/result?p=zwitch + +[npm]: https://docs.npmjs.com/cli/install + +[license]: license + +[author]: https://wooorm.com + +[one]: #onevalue-rest + +[handler]: #function-handlervalue-rest